/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.internal;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;

import org.apache.lucene.search.Explanation;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget;
import static org.elasticsearch.search.highlight.HighlightField.readHighlightField;
import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField;

/**
 *
 */
public class InternalSearchHit implements SearchHit {

    private static final Object[] EMPTY_SORT_VALUES = new Object[0];
    private static final Text MAX_TERM_AS_TEXT = new StringAndBytesText(BytesRefFieldComparatorSource.MAX_TERM.utf8ToString());

    private transient int docId;

    private float score = Float.NEGATIVE_INFINITY;

    private Text id;
    private Text type;

    private InternalNestedIdentity nestedIdentity;

    private long version = -1;

    private BytesReference source;

    private Map<String, SearchHitField> fields = ImmutableMap.of();

    private Map<String, HighlightField> highlightFields = null;

    private Object[] sortValues = EMPTY_SORT_VALUES;

    private String[] matchedQueries = Strings.EMPTY_ARRAY;

    private Explanation explanation;

    @Nullable
    private SearchShardTarget shard;

    private Map<String, Object> sourceAsMap;
    private byte[] sourceAsBytes;

    private Map<String,
            InternalSearchHits> innerHits;

    private InternalSearchHit() {
    }

    public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
        this.docId = docId;
        this.id = new StringAndBytesText(id);
        this.type = type;
        this.fields = fields;
    }

    public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
        this.docId = nestedTopDocId;
        this.id = new StringAndBytesText(id);
        this.type = type;
        this.nestedIdentity = nestedIdentity;
        this.fields = fields;
    }

    public int docId() {
        return this.docId;
    }

    public void shardTarget(SearchShardTarget shardTarget) {
        this.shard = shardTarget;
        if (innerHits != null) {
            for (InternalSearchHits searchHits : innerHits.values()) {
                searchHits.shardTarget(shardTarget);
            }
        }
    }

    public void score(float score) {
        this.score = score;
    }

    @Override
    public float score() {
        return this.score;
    }

    @Override
    public float getScore() {
        return score();
    }

    public void version(long version) {
        this.version = version;
    }

    @Override
    public long version() {
        return this.version;
    }

    @Override
    public long getVersion() {
        return this.version;
    }

    @Override
    public String index() {
        return shard.index();
    }

    @Override
    public String getIndex() {
        return index();
    }

    @Override
    public String id() {
        return id.string();
    }

    @Override
    public String getId() {
        return id();
    }

    @Override
    public String type() {
        return type.string();
    }

    @Override
    public String getType() {
        return type();
    }

    @Override
    public NestedIdentity getNestedIdentity() {
        return nestedIdentity;
    }

    /**
     * Returns a bytes reference, also uncompressing the source if needed.
     */
    @Override
    public BytesReference sourceRef() {
        try {
            this.source = CompressorFactory.uncompressIfNeeded(this.source);
            return this.source;
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to decompress source", e);
        }
    }

    /**
     * Sets the source representation, which might be compressed.
     */
    public InternalSearchHit sourceRef(BytesReference source) {
        this.source = source;
        this.sourceAsBytes = null;
        this.sourceAsMap = null;
        return this;
    }

    @Override
    public BytesReference getSourceRef() {
        return sourceRef();
    }

    /**
     * Internal source representation, which might be compressed.
     */
    public BytesReference internalSourceRef() {
        return source;
    }

    @Override
    public byte[] source() {
        if (source == null) {
            return null;
        }
        if (sourceAsBytes != null) {
            return sourceAsBytes;
        }
        this.sourceAsBytes = sourceRef().toBytes();
        return this.sourceAsBytes;
    }

    @Override
    public boolean isSourceEmpty() {
        return source == null;
    }

    @Override
    public Map<String, Object> getSource() {
        return sourceAsMap();
    }

    @Override
    public String sourceAsString() {
        if (source == null) {
            return null;
        }
        try {
            return XContentHelper.convertToJson(sourceRef(), false);
        } catch (IOException e) {
            // keep the cause so callers can see why the conversion failed
            throw new ElasticsearchParseException("failed to convert source to a json string", e);
        }
    }

    @Override
    public String getSourceAsString() {
        return sourceAsString();
    }

    @SuppressWarnings({"unchecked"})
    @Override
    public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
        if (source == null) {
            return null;
        }
        if (sourceAsMap != null) {
            return sourceAsMap;
        }
        sourceAsMap = SourceLookup.sourceAsMap(source);
        return sourceAsMap;
    }

    @Override
    public Iterator<SearchHitField> iterator() {
        return fields.values().iterator();
    }

    @Override
    public SearchHitField field(String fieldName) {
        return fields().get(fieldName);
    }

    @Override
    public Map<String, SearchHitField> fields() {
        if (fields == null) {
            return ImmutableMap.of();
        }
        return fields;
    }

    // returns the fields without handling null cases
    public Map<String, SearchHitField> fieldsOrNull() {
        return this.fields;
    }

    @Override
    public Map<String, SearchHitField> getFields() {
        return fields();
    }

    public void fields(Map<String, SearchHitField> fields) {
        this.fields = fields;
    }

    public Map<String, HighlightField> internalHighlightFields() {
        return highlightFields;
    }

    @Override
    public Map<String, HighlightField> highlightFields() {
        if (highlightFields == null) {
            return ImmutableMap.of();
        }
        return this.highlightFields;
    }

    @Override
    public Map<String, HighlightField> getHighlightFields() {
        return highlightFields();
    }

    public void highlightFields(Map<String, HighlightField> highlightFields) {
        this.highlightFields = highlightFields;
    }

    public void sortValues(Object[] sortValues) {
        // LUCENE 4 UPGRADE: There must be a better way.
        // We want to convert to a Text object here, and not BytesRef.
        // Don't write into sortValues! Otherwise the fields in the FieldDoc are modified, which may be
        // used in other places (SearchContext#lastEmittedDoc).
        if (sortValues == null) {
            // a null sort value array is treated as empty
            this.sortValues = EMPTY_SORT_VALUES;
            return;
        }
        Object[] sortValuesCopy = new Object[sortValues.length];
        System.arraycopy(sortValues, 0, sortValuesCopy, 0, sortValues.length);
        for (int i = 0; i < sortValues.length; i++) {
            if (sortValues[i] instanceof BytesRef) {
                sortValuesCopy[i] = new StringAndBytesText(new BytesArray((BytesRef) sortValues[i]));
            }
        }
        this.sortValues = sortValuesCopy;
    }

    @Override
    public Object[] sortValues() {
        return sortValues;
    }

    @Override
    public Object[] getSortValues() {
        return sortValues();
    }

    @Override
    public Explanation explanation() {
        return explanation;
    }

    @Override
    public Explanation getExplanation() {
        return explanation();
    }

    public void explanation(Explanation explanation) {
        this.explanation = explanation;
    }

    @Override
    public SearchShardTarget shard() {
        return shard;
    }

    @Override
    public SearchShardTarget getShard() {
        return shard();
    }

    public void shard(SearchShardTarget target) {
        this.shard = target;
    }

    public void matchedQueries(String[] matchedQueries) {
        this.matchedQueries = matchedQueries;
    }

    @Override
    public String[] matchedQueries() {
        return this.matchedQueries;
    }

    @Override
    public String[] getMatchedQueries() {
        return this.matchedQueries;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, SearchHits> getInnerHits() {
        return (Map) innerHits;
    }

    public void setInnerHits(Map<String, InternalSearchHits> innerHits) {
        this.innerHits = innerHits;
    }

    public static class Fields {
        static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
        static final XContentBuilderString _ID = new XContentBuilderString("_id");
        static final XContentBuilderString _VERSION = new XContentBuilderString("_version");
        static final XContentBuilderString _SCORE = new XContentBuilderString("_score");
        static final XContentBuilderString FIELDS = new XContentBuilderString("fields");
        static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight");
        static final XContentBuilderString SORT = new XContentBuilderString("sort");
        static final XContentBuilderString MATCHED_QUERIES = new XContentBuilderString("matched_queries");
        static final XContentBuilderString _EXPLANATION = new XContentBuilderString("_explanation");
        static final XContentBuilderString VALUE = new XContentBuilderString("value");
        static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
        static final XContentBuilderString DETAILS = new XContentBuilderString("details");
        static final XContentBuilderString INNER_HITS = new XContentBuilderString("inner_hits");
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        List<SearchHitField> metaFields = Lists.newArrayList();
        List<SearchHitField> otherFields = Lists.newArrayList();
        if (fields != null && !fields.isEmpty()) {
            for (SearchHitField field : fields.values()) {
                if (field.values().isEmpty()) {
                    continue;
                }
                if (field.isMetadataField()) {
                    metaFields.add(field);
                } else {
                    otherFields.add(field);
                }
            }
        }
        builder.startObject();
        // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
        // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
        if (explanation() != null && shard != null) {
            builder.field("_shard", shard.shardId());
            builder.field("_node", shard.nodeIdText());
        }
        if (shard != null) {
            builder.field(Fields._INDEX, shard.indexText());
        }
        builder.field(Fields._TYPE, type);
        builder.field(Fields._ID, id);
        if (nestedIdentity != null) {
            nestedIdentity.toXContent(builder, params);
        }
        if (version != -1) {
            builder.field(Fields._VERSION, version);
        }
        if (Float.isNaN(score)) {
            builder.nullField(Fields._SCORE);
        } else {
            builder.field(Fields._SCORE, score);
        }
        for (SearchHitField field : metaFields) {
            builder.field(field.name(), field.value());
        }
        if (source != null) {
            XContentHelper.writeRawField("_source", source, builder, params);
        }
        if (!otherFields.isEmpty()) {
            builder.startObject(Fields.FIELDS);
            for (SearchHitField field : otherFields) {
                builder.startArray(field.name());
                for (Object value : field.getValues()) {
                    builder.value(value);
                }
                builder.endArray();
            }
            builder.endObject();
        }
        if (highlightFields != null && !highlightFields.isEmpty()) {
            builder.startObject(Fields.HIGHLIGHT);
            for (HighlightField field : highlightFields.values()) {
                builder.field(field.name());
                if (field.fragments() == null) {
                    builder.nullValue();
                } else {
                    builder.startArray();
                    for (Text fragment : field.fragments()) {
                        builder.value(fragment);
                    }
                    builder.endArray();
                }
            }
            builder.endObject();
        }
        if (sortValues != null && sortValues.length > 0) {
            builder.startArray(Fields.SORT);
            for (Object sortValue : sortValues) {
                if (sortValue != null && sortValue.equals(MAX_TERM_AS_TEXT)) {
                    // We don't display MAX_TERM in JSON responses in case some clients have UTF-8 parsers that wouldn't accept a
                    // non-character in the response, even though this is valid UTF-8
                    builder.nullValue();
                } else {
                    builder.value(sortValue);
                }
            }
            builder.endArray();
        }
        if (matchedQueries.length > 0) {
            builder.startArray(Fields.MATCHED_QUERIES);
            for (String matchedFilter : matchedQueries) {
                builder.value(matchedFilter);
            }
            builder.endArray();
        }
        if (explanation() != null) {
            builder.field(Fields._EXPLANATION);
            buildExplanation(builder, explanation());
        }
        if (innerHits != null) {
            builder.startObject(Fields.INNER_HITS);
            for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
                builder.startObject(entry.getKey());
                entry.getValue().toXContent(builder, params);
                builder.endObject();
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
        builder.startObject();
        builder.field(Fields.VALUE, explanation.getValue());
        builder.field(Fields.DESCRIPTION, explanation.getDescription());
        Explanation[] innerExps = explanation.getDetails();
        if (innerExps != null) {
            builder.startArray(Fields.DETAILS);
            for (Explanation exp : innerExps) {
                buildExplanation(builder, exp);
            }
            builder.endArray();
        }
        builder.endObject();
    }

    public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
        InternalSearchHit hit = new InternalSearchHit();
        hit.readFrom(in, context);
        return hit;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        readFrom(in, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM));
    }

    public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
        score = in.readFloat();
        id = in.readText();
        type = in.readText();
        nestedIdentity = in.readOptionalStreamable(new InternalNestedIdentity());
        version = in.readLong();
        source = in.readBytesReference();
        if (source.length() == 0) {
            source = null;
        }
        if (in.readBoolean()) {
            explanation = readExplanation(in);
        }
        int size = in.readVInt();
        if (size == 0) {
            fields = ImmutableMap.of();
        } else if (size == 1) {
            SearchHitField hitField = readSearchHitField(in);
            fields = ImmutableMap.of(hitField.name(), hitField);
        } else if (size == 2) {
            SearchHitField hitField1 = readSearchHitField(in);
            SearchHitField hitField2 = readSearchHitField(in);
            fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2);
        } else if (size == 3) {
            SearchHitField hitField1 = readSearchHitField(in);
            SearchHitField hitField2 = readSearchHitField(in);
            SearchHitField hitField3 = readSearchHitField(in);
            fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3);
        } else if (size == 4) {
            SearchHitField hitField1 = readSearchHitField(in);
            SearchHitField hitField2 = readSearchHitField(in);
            SearchHitField hitField3 = readSearchHitField(in);
            SearchHitField hitField4 = readSearchHitField(in);
            fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4);
        } else if (size == 5) {
            SearchHitField hitField1 = readSearchHitField(in);
            SearchHitField hitField2 = readSearchHitField(in);
            SearchHitField hitField3 = readSearchHitField(in);
            SearchHitField hitField4 = readSearchHitField(in);
            SearchHitField hitField5 = readSearchHitField(in);
            fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4, hitField5.name(), hitField5);
        } else {
            ImmutableMap.Builder<String, SearchHitField> builder = ImmutableMap.builder();
            for (int i = 0; i < size; i++) {
                SearchHitField hitField = readSearchHitField(in);
                builder.put(hitField.name(), hitField);
            }
            fields = builder.build();
        }

        size = in.readVInt();
        if (size == 0) {
            highlightFields = ImmutableMap.of();
        } else if (size == 1) {
            HighlightField field = readHighlightField(in);
            highlightFields = ImmutableMap.of(field.name(), field);
        } else if (size == 2) {
            HighlightField field1 = readHighlightField(in);
            HighlightField field2 = readHighlightField(in);
            highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2);
        } else if (size == 3) {
            HighlightField field1 = readHighlightField(in);
            HighlightField field2 = readHighlightField(in);
            HighlightField field3 = readHighlightField(in);
            highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3);
        } else if (size == 4) {
            HighlightField field1 = readHighlightField(in);
            HighlightField field2 = readHighlightField(in);
            HighlightField field3 = readHighlightField(in);
            HighlightField field4 = readHighlightField(in);
            highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3, field4.name(), field4);
        } else {
            ImmutableMap.Builder<String, HighlightField> builder = ImmutableMap.builder();
            for (int i = 0; i < size; i++) {
                HighlightField field = readHighlightField(in);
                builder.put(field.name(), field);
            }
            highlightFields = builder.build();
        }

        size = in.readVInt();
        if (size > 0) {
            sortValues = new Object[size];
            for (int i = 0; i < sortValues.length; i++) {
                byte type = in.readByte();
                if (type == 0) {
                    sortValues[i] = null;
                } else if (type == 1) {
                    sortValues[i] = in.readString();
                } else if (type == 2) {
                    sortValues[i] = in.readInt();
                } else if (type == 3) {
                    sortValues[i] = in.readLong();
                } else if (type == 4) {
                    sortValues[i] = in.readFloat();
                } else if (type == 5) {
                    sortValues[i] = in.readDouble();
                } else if (type == 6) {
                    sortValues[i] = in.readByte();
                } else if (type == 7) {
                    sortValues[i] = in.readShort();
                } else if (type == 8) {
                    sortValues[i] = in.readBoolean();
                } else if (type == 9) {
                    sortValues[i] = in.readText();
                } else {
                    throw new IOException("Can't match type [" + type + "]");
                }
            }
        }

        size = in.readVInt();
        if (size > 0) {
            matchedQueries = new String[size];
            for (int i = 0; i < size; i++) {
                matchedQueries[i] = in.readString();
            }
        }

        if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
            if (in.readBoolean()) {
                shard = readSearchShardTarget(in);
            }
        } else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
            int lookupId = in.readVInt();
            if (lookupId > 0) {
                shard = context.handleShardLookup().get(lookupId);
            }
        }

        size = in.readVInt();
        if (size > 0) {
            innerHits = new HashMap<>(size);
            for (int i = 0; i < size; i++) {
                String key = in.readString();
                InternalSearchHits value = InternalSearchHits.readSearchHits(in, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.NO_STREAM));
                innerHits.put(key, value);
            }
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        writeTo(out, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM));
    }

    public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException {
        out.writeFloat(score);
        out.writeText(id);
        out.writeText(type);
        out.writeOptionalStreamable(nestedIdentity);
        out.writeLong(version);
        out.writeBytesReference(source);
        if (explanation == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeExplanation(out, explanation);
        }
        if (fields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(fields.size());
            for (SearchHitField hitField : fields().values()) {
                hitField.writeTo(out);
            }
        }
        if (highlightFields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(highlightFields.size());
            for (HighlightField highlightField : highlightFields.values()) {
                highlightField.writeTo(out);
            }
        }
        if (sortValues.length == 0) {
            out.writeVInt(0);
        } else {
            out.writeVInt(sortValues.length);
            for (Object sortValue : sortValues) {
                if (sortValue == null) {
                    out.writeByte((byte) 0);
                } else {
                    Class type = sortValue.getClass();
                    if (type == String.class) {
                        out.writeByte((byte) 1);
                        out.writeString((String) sortValue);
                    } else if (type == Integer.class) {
                        out.writeByte((byte) 2);
                        out.writeInt((Integer) sortValue);
                    } else if (type == Long.class) {
                        out.writeByte((byte) 3);
                        out.writeLong((Long) sortValue);
                    } else if (type == Float.class) {
                        out.writeByte((byte) 4);
                        out.writeFloat((Float) sortValue);
                    } else if (type == Double.class) {
                        out.writeByte((byte) 5);
                        out.writeDouble((Double) sortValue);
                    } else if (type == Byte.class) {
                        out.writeByte((byte) 6);
                        out.writeByte((Byte) sortValue);
                    } else if (type == Short.class) {
                        out.writeByte((byte) 7);
                        out.writeShort((Short) sortValue);
                    } else if (type == Boolean.class) {
                        out.writeByte((byte) 8);
                        out.writeBoolean((Boolean) sortValue);
                    } else if (sortValue instanceof Text) {
                        out.writeByte((byte) 9);
                        out.writeText((Text) sortValue);
                    } else {
                        throw new IOException("Can't handle sort field value of type [" + type + "]");
                    }
                }
            }
        }
        if (matchedQueries.length == 0) {
            out.writeVInt(0);
        } else {
            out.writeVInt(matchedQueries.length);
            for (String matchedFilter : matchedQueries) {
                out.writeString(matchedFilter);
            }
        }
        if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
            if (shard == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                shard.writeTo(out);
            }
        } else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
            if (shard == null) {
                out.writeVInt(0);
            } else {
                out.writeVInt(context.shardHandleLookup().get(shard));
            }
        }
        if (innerHits == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(innerHits.size());
            for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
                out.writeString(entry.getKey());
                entry.getValue().writeTo(out, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.NO_STREAM));
            }
        }
    }

    public final static class InternalNestedIdentity implements NestedIdentity, Streamable, ToXContent {

        private Text field;
        private int offset;
        private InternalNestedIdentity child;

        public InternalNestedIdentity(String field, int offset, InternalNestedIdentity child) {
            this.field = new StringAndBytesText(field);
            this.offset = offset;
            this.child = child;
        }

        InternalNestedIdentity() {
        }

        @Override
        public Text getField() {
            return field;
        }

        @Override
        public int getOffset() {
            return offset;
        }

        @Override
        public NestedIdentity getChild() {
            return child;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            field = in.readOptionalText();
            offset = in.readInt();
            child = in.readOptionalStreamable(new InternalNestedIdentity());
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeOptionalText(field);
            out.writeInt(offset);
            out.writeOptionalStreamable(child);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(Fields._NESTED);
            if (field != null) {
                builder.field(Fields._NESTED_FIELD, field);
            }
            if (offset != -1) {
                builder.field(Fields._NESTED_OFFSET, offset);
            }
            if (child != null) {
                builder = child.toXContent(builder, params);
            }
            builder.endObject();
            return builder;
        }

        public static class Fields {
            static final XContentBuilderString _NESTED = new XContentBuilderString("_nested");
            static final XContentBuilderString _NESTED_FIELD = new XContentBuilderString("field");
            static final XContentBuilderString _NESTED_OFFSET = new XContentBuilderString("offset");
        }
    }
}
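// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the writeTo/readFrom
// pair above tags every sort value with one byte before writing it. The helper
// below mirrors that tag table for plain JDK types; SortValueTags and tagFor
// are invented names, and the Elasticsearch-specific Text case (tag 9) is
// deliberately omitted.
// ---------------------------------------------------------------------------

import java.util.HashMap;
import java.util.Map;

public final class SortValueTags {

    private static final Map<Class<?>, Byte> TAGS = new HashMap<>();

    static {
        TAGS.put(String.class, (byte) 1);
        TAGS.put(Integer.class, (byte) 2);
        TAGS.put(Long.class, (byte) 3);
        TAGS.put(Float.class, (byte) 4);
        TAGS.put(Double.class, (byte) 5);
        TAGS.put(Byte.class, (byte) 6);
        TAGS.put(Short.class, (byte) 7);
        TAGS.put(Boolean.class, (byte) 8);
    }

    static byte tagFor(Object sortValue) {
        if (sortValue == null) {
            return 0; // writeTo writes tag 0 with no payload for null values
        }
        Byte tag = TAGS.get(sortValue.getClass());
        if (tag == null) {
            // writeTo throws an IOException here; an unchecked exception keeps the sketch small
            throw new IllegalArgumentException("Can't handle sort field value of type [" + sortValue.getClass() + "]");
        }
        return tag;
    }

    public static void main(String[] args) {
        System.out.println(tagFor(42L));   // 3
        System.out.println(tagFor("ab")); // 1
        System.out.println(tagFor(null)); // 0
    }
}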
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/entity_type.proto package com.google.cloud.dialogflow.v2; /** * <pre> * The request message for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.UpdateEntityTypeRequest} */ public final class UpdateEntityTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.UpdateEntityTypeRequest) UpdateEntityTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateEntityTypeRequest.newBuilder() to construct. private UpdateEntityTypeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateEntityTypeRequest() { languageCode_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateEntityTypeRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { com.google.cloud.dialogflow.v2.EntityType.Builder subBuilder = null; if (entityType_ != null) { subBuilder = entityType_.toBuilder(); } entityType_ = input.readMessage(com.google.cloud.dialogflow.v2.EntityType.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(entityType_); entityType_ = subBuilder.buildPartial(); } break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); languageCode_ = s; break; } case 26: { com.google.protobuf.FieldMask.Builder subBuilder = null; if (updateMask_ != null) { subBuilder = updateMask_.toBuilder(); } updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(updateMask_); updateMask_ = subBuilder.buildPartial(); } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_UpdateEntityTypeRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_UpdateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.class, com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.Builder.class); } public static final int ENTITY_TYPE_FIELD_NUMBER = 1; private com.google.cloud.dialogflow.v2.EntityType entityType_; /** * <pre> * Required. 
The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public boolean hasEntityType() { return entityType_ != null; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public com.google.cloud.dialogflow.v2.EntityType getEntityType() { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public com.google.cloud.dialogflow.v2.EntityTypeOrBuilder getEntityTypeOrBuilder() { return getEntityType(); } public static final int LANGUAGE_CODE_FIELD_NUMBER = 2; private volatile java.lang.Object languageCode_; /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public boolean hasUpdateMask() { return updateMask_ != null; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * Optional. The mask to control which fields get updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return getUpdateMask(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (entityType_ != null) { output.writeMessage(1, getEntityType()); } if (!getLanguageCodeBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_); } if (updateMask_ != null) { output.writeMessage(3, getUpdateMask()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (entityType_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getEntityType()); } if (!getLanguageCodeBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_); } if (updateMask_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, getUpdateMask()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest other = (com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest) obj; boolean result = true; result = result && (hasEntityType() == other.hasEntityType()); if (hasEntityType()) { result = result && getEntityType() .equals(other.getEntityType()); } result = result && getLanguageCode() .equals(other.getLanguageCode()); result = result && (hasUpdateMask() == other.hasUpdateMask()); if (hasUpdateMask()) { result = result && getUpdateMask() .equals(other.getUpdateMask()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEntityType()) { hash = (37 * hash) + ENTITY_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEntityType().hashCode(); } hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( com.google.protobuf.ByteString 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The request message for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.UpdateEntityTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.UpdateEntityTypeRequest) com.google.cloud.dialogflow.v2.UpdateEntityTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_UpdateEntityTypeRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_UpdateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.class, com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); if (entityTypeBuilder_ == null) { entityType_ = null; } else { entityType_ = null; entityTypeBuilder_ = null; } languageCode_ = ""; if (updateMaskBuilder_ == null) { updateMask_ = null; } else { updateMask_ = null; updateMaskBuilder_ = null; } return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_UpdateEntityTypeRequest_descriptor; } public com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.getDefaultInstance(); } public com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest build() { com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest buildPartial() { com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest result = new com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest(this); if (entityTypeBuilder_ == null) { result.entityType_ = entityType_; } else { result.entityType_ = entityTypeBuilder_.build(); } result.languageCode_ = languageCode_; if (updateMaskBuilder_ == null) { result.updateMask_ = updateMask_; } else { result.updateMask_ = updateMaskBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest) { return mergeFrom((com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest other) { if (other == com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest.getDefaultInstance()) return this; if (other.hasEntityType()) { mergeEntityType(other.getEntityType()); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; onChanged(); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.dialogflow.v2.EntityType entityType_ = null; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder> entityTypeBuilder_; /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public boolean hasEntityType() { return entityTypeBuilder_ != null || entityType_ != null; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public com.google.cloud.dialogflow.v2.EntityType getEntityType() { if (entityTypeBuilder_ == null) { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } else { return entityTypeBuilder_.getMessage(); } } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public Builder setEntityType(com.google.cloud.dialogflow.v2.EntityType value) { if (entityTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entityType_ = value; onChanged(); } else { entityTypeBuilder_.setMessage(value); } return this; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. 
* </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public Builder setEntityType( com.google.cloud.dialogflow.v2.EntityType.Builder builderForValue) { if (entityTypeBuilder_ == null) { entityType_ = builderForValue.build(); onChanged(); } else { entityTypeBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public Builder mergeEntityType(com.google.cloud.dialogflow.v2.EntityType value) { if (entityTypeBuilder_ == null) { if (entityType_ != null) { entityType_ = com.google.cloud.dialogflow.v2.EntityType.newBuilder(entityType_).mergeFrom(value).buildPartial(); } else { entityType_ = value; } onChanged(); } else { entityTypeBuilder_.mergeFrom(value); } return this; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public Builder clearEntityType() { if (entityTypeBuilder_ == null) { entityType_ = null; onChanged(); } else { entityType_ = null; entityTypeBuilder_ = null; } return this; } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public com.google.cloud.dialogflow.v2.EntityType.Builder getEntityTypeBuilder() { onChanged(); return getEntityTypeFieldBuilder().getBuilder(); } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ public com.google.cloud.dialogflow.v2.EntityTypeOrBuilder getEntityTypeOrBuilder() { if (entityTypeBuilder_ != null) { return entityTypeBuilder_.getMessageOrBuilder(); } else { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } } /** * <pre> * Required. The entity type to update. * Format: `projects/&lt;Project ID&gt;/agent/entityTypes/&lt;EntityType ID&gt;`. * </pre> * * <code>.google.cloud.dialogflow.v2.EntityType entity_type = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder> getEntityTypeFieldBuilder() { if (entityTypeBuilder_ == null) { entityTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder>( getEntityType(), getParentForChildren(), isClean()); entityType_ = null; } return entityTypeBuilder_; } private java.lang.Object languageCode_ = ""; /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. 
* </pre> * * <code>string language_code = 2;</code> */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder setLanguageCode( java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; onChanged(); return this; } /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); onChanged(); return this; } /** * <pre> * Optional. The language of entity synonyms defined in `entity_type`. If not * specified, the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder setLanguageCodeBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; onChanged(); return this; } private com.google.protobuf.FieldMask updateMask_ = null; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public boolean hasUpdateMask() { return updateMaskBuilder_ != null || updateMask_ != null; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * Optional. 
The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; onChanged(); } else { updateMaskBuilder_.setMessage(value); } return this; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); onChanged(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (updateMask_ != null) { updateMask_ = com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial(); } else { updateMask_ = value; } onChanged(); } else { updateMaskBuilder_.mergeFrom(value); } return this; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder clearUpdateMask() { if (updateMaskBuilder_ == null) { updateMask_ = null; onChanged(); } else { updateMask_ = null; updateMaskBuilder_ = null; } return this; } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * Optional. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * Optional. The mask to control which fields get updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.UpdateEntityTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.UpdateEntityTypeRequest) private static final com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest(); } public static com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateEntityTypeRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateEntityTypeRequest>() { public UpdateEntityTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UpdateEntityTypeRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<UpdateEntityTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateEntityTypeRequest> getParserForType() { return PARSER; } public com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
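// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the generated file above): building an
// UpdateEntityTypeRequest through the generated Builder API and round-tripping
// it through the protobuf wire format. The field values are placeholders; only
// methods visible in the generated code (or standard protobuf runtime methods)
// are used.
// ---------------------------------------------------------------------------

import com.google.cloud.dialogflow.v2.EntityType;
import com.google.cloud.dialogflow.v2.UpdateEntityTypeRequest;
import com.google.protobuf.FieldMask;
import com.google.protobuf.InvalidProtocolBufferException;

public class UpdateEntityTypeRequestExample {

    public static void main(String[] args) throws InvalidProtocolBufferException {
        UpdateEntityTypeRequest request = UpdateEntityTypeRequest.newBuilder()
            // Required: the entity type to update (a default instance as a placeholder).
            .setEntityType(EntityType.getDefaultInstance())
            // Optional: the language of the entity synonyms.
            .setLanguageCode("en")
            // Optional: restrict the update to specific fields ("display_name" is a placeholder path).
            .setUpdateMask(FieldMask.newBuilder().addPaths("display_name").build())
            .build();

        // Serialize and parse back using the generated parser.
        byte[] bytes = request.toByteArray();
        UpdateEntityTypeRequest parsed = UpdateEntityTypeRequest.parseFrom(bytes);
        System.out.println(parsed.getLanguageCode()); // prints "en"
    }
}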
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static org.easymock.EasyMock.expect; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import com.facebook.buck.java.AccumulateClassNames; import com.facebook.buck.java.FakeJavaLibraryRule; import com.facebook.buck.java.abi.AbiWriterProtocol; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.rules.AbiRule; import com.facebook.buck.rules.BuildContext; import com.facebook.buck.rules.FakeBuildableContext; import com.facebook.buck.rules.Sha1HashCode; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.Step; import com.facebook.buck.step.TestExecutionContext; import com.facebook.buck.testutil.MoreAsserts; import com.facebook.buck.util.AndroidPlatformTarget; import com.facebook.buck.util.ProjectFilesystem; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.hash.HashCode; import org.easymock.EasyMockSupport; import org.junit.Test; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.util.List; public class DexProducedFromJavaLibraryThatContainsClassFilesTest extends EasyMockSupport { @Test public void testGetBuildStepsWhenThereAreClassesToDex() throws IOException { FakeJavaLibraryRule javaLibraryRule = new FakeJavaLibraryRule(new BuildTarget("//foo", "bar")); javaLibraryRule.setOutputFile("buck-out/gen/foo/bar.jar"); AccumulateClassNames accumulateClassNames = createMock(AccumulateClassNames.class); expect(accumulateClassNames.getClassNames()).andReturn( ImmutableSortedMap.of("com/example/Foo", HashCode.fromString("cafebabe"))); expect(accumulateClassNames.getJavaLibraryRule()).andReturn(javaLibraryRule); BuildContext context = createMock(BuildContext.class); FakeBuildableContext buildableContext = new FakeBuildableContext(); replayAll(); BuildTarget buildTarget = new BuildTarget("//foo", "bar", "dex"); DexProducedFromJavaLibraryThatContainsClassFiles preDex = new DexProducedFromJavaLibraryThatContainsClassFiles(buildTarget, accumulateClassNames); List<Step> steps = preDex.getBuildSteps(context, buildableContext); verifyAll(); resetAll(); AndroidPlatformTarget androidPlatformTarget = createMock(AndroidPlatformTarget.class); expect(androidPlatformTarget.getDxExecutable()).andReturn(new File("/usr/bin/dx")); ProjectFilesystem projectFilesystem = createMock(ProjectFilesystem.class); expect(projectFilesystem.resolve(Paths.get("buck-out/gen/foo"))) .andReturn(Paths.get("/home/user/buck-out/gen/foo")); expect(projectFilesystem.resolve(Paths.get("buck-out/gen/foo/bar#dex.dex.jar"))) .andReturn(Paths.get("/home/user/buck-out/gen/foo/bar#dex.dex.jar")); expect(projectFilesystem.resolve(Paths.get("buck-out/gen/foo/bar.jar"))) 
.andReturn(Paths.get("/home/user/buck-out/gen/foo/bar.jar")); replayAll(); ExecutionContext executionContext = TestExecutionContext .newBuilder() .setAndroidPlatformTarget(Optional.of(androidPlatformTarget)) .setProjectFilesystem(projectFilesystem) .build(); String expectedDxCommand = "/usr/bin/dx" + " --dex --no-optimize --force-jumbo --output buck-out/gen/foo/bar#dex.dex.jar " + "/home/user/buck-out/gen/foo/bar.jar"; MoreAsserts.assertSteps("Generate bar.dex.jar.", ImmutableList.of( "rm -f /home/user/buck-out/gen/foo/bar#dex.dex.jar", "mkdir -p /home/user/buck-out/gen/foo", expectedDxCommand, "record_dx_success"), steps, executionContext); verifyAll(); resetAll(); Sha1HashCode abiKey = new Sha1HashCode("f7f34ed13b881c6c6f663533cde4a436ea84435e"); expect(accumulateClassNames.getAbiKey()).andReturn(abiKey); replayAll(); Step recordArtifactAndMetadataStep = steps.get(3); int exitCode = recordArtifactAndMetadataStep.execute(executionContext); assertEquals(0, exitCode); assertTrue("The generated .dex.jar file should be in the set of recorded artifacts.", buildableContext.getRecordedArtifacts().contains(Paths.get("bar#dex.dex.jar"))); buildableContext.assertContainsMetadataMapping(AbiRule.ABI_KEY_FOR_DEPS_ON_DISK_METADATA, abiKey.getHash()); buildableContext.assertContainsMetadataMapping(AbiRule.ABI_KEY_ON_DISK_METADATA, abiKey.getHash()); verifyAll(); } @Test public void testGetBuildStepsWhenThereAreNoClassesToDex() throws IOException { AccumulateClassNames accumulateClassNames = createMock(AccumulateClassNames.class); expect(accumulateClassNames.getClassNames()).andReturn( ImmutableSortedMap.<String, HashCode>of()); BuildContext context = createMock(BuildContext.class); FakeBuildableContext buildableContext = new FakeBuildableContext(); replayAll(); BuildTarget buildTarget = new BuildTarget("//foo", "bar"); DexProducedFromJavaLibraryThatContainsClassFiles preDex = new DexProducedFromJavaLibraryThatContainsClassFiles(buildTarget, accumulateClassNames); List<Step> steps = preDex.getBuildSteps(context, buildableContext); verifyAll(); resetAll(); ProjectFilesystem projectFilesystem = createMock(ProjectFilesystem.class); expect(projectFilesystem.resolve(Paths.get("buck-out/gen/foo"))) .andReturn(Paths.get("/home/user/buck-out/gen/foo")); expect(projectFilesystem.resolve(Paths.get("buck-out/gen/foo/bar.dex.jar"))) .andReturn(Paths.get("/home/user/buck-out/gen/foo/bar.dex.jar")); replayAll(); ExecutionContext executionContext = TestExecutionContext .newBuilder() .setProjectFilesystem(projectFilesystem) .build(); MoreAsserts.assertSteps("Do not generate a .dex.jar file.", ImmutableList.of( "rm -f /home/user/buck-out/gen/foo/bar.dex.jar", "mkdir -p /home/user/buck-out/gen/foo", "record_empty_dx"), steps, executionContext); verifyAll(); resetAll(); Sha1HashCode abiKey = new Sha1HashCode(AbiWriterProtocol.EMPTY_ABI_KEY); expect(accumulateClassNames.getAbiKey()).andReturn(abiKey); replayAll(); Step recordArtifactAndMetadataStep = steps.get(2); int exitCode = recordArtifactAndMetadataStep.execute(executionContext); assertEquals(0, exitCode); buildableContext.assertContainsMetadataMapping(AbiRule.ABI_KEY_FOR_DEPS_ON_DISK_METADATA, abiKey.getHash()); buildableContext.assertContainsMetadataMapping(AbiRule.ABI_KEY_ON_DISK_METADATA, abiKey.getHash()); verifyAll(); } @Test public void testObserverMethods() { AccumulateClassNames accumulateClassNames = createMock(AccumulateClassNames.class); expect(accumulateClassNames.getClassNames()) .andReturn(ImmutableSortedMap.of("com/example/Foo", 
HashCode.fromString("cafebabe"))) .anyTimes(); replayAll(); BuildTarget buildTarget = new BuildTarget("//foo", "bar"); DexProducedFromJavaLibraryThatContainsClassFiles preDexWithClasses = new DexProducedFromJavaLibraryThatContainsClassFiles(buildTarget, accumulateClassNames); assertNull(preDexWithClasses.getPathToOutputFile()); assertTrue(Iterables.isEmpty(preDexWithClasses.getInputsToCompareToOutput())); assertEquals(Paths.get("buck-out/gen/foo/bar.dex.jar"), preDexWithClasses.getPathToDex()); assertTrue(preDexWithClasses.hasOutput()); verifyAll(); } }
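import static org.easymock.EasyMock.expect;

import org.easymock.EasyMockSupport;

// A minimal, self-contained sketch of the EasyMockSupport lifecycle the tests above
// lean on: record expectations, replayAll(), exercise the code under test, verifyAll(),
// then resetAll() to reuse the same mocks for the next phase. The Greeter interface is
// a hypothetical stand-in, not part of Buck.
public class EasyMockLifecycleSketch extends EasyMockSupport {

  interface Greeter {
    String greet(String name);
  }

  public void demonstrate() {
    Greeter greeter = createMock(Greeter.class);
    expect(greeter.greet("world")).andReturn("hello, world"); // record phase
    replayAll(); // all mocks registered with this support object switch to replay mode

    System.out.println(greeter.greet("world")); // exercise

    verifyAll(); // every recorded expectation must have been satisfied
    resetAll();  // back to record mode, mirroring the phased assertions in the test above
  }

  public static void main(String[] args) {
    new EasyMockLifecycleSketch().demonstrate();
  }
}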
// Copyright 2017 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package adwords.axis.v201809.campaignmanagement; import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME; import com.beust.jcommander.Parameter; import com.google.api.ads.adwords.axis.factory.AdWordsServices; import com.google.api.ads.adwords.axis.v201809.cm.ApiError; import com.google.api.ads.adwords.axis.v201809.cm.ApiException; import com.google.api.ads.adwords.axis.v201809.cm.Campaign; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroup; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroupOperation; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroupPerformanceTarget; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroupPerformanceTargetOperation; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroupPerformanceTargetServiceInterface; import com.google.api.ads.adwords.axis.v201809.cm.CampaignGroupServiceInterface; import com.google.api.ads.adwords.axis.v201809.cm.CampaignOperation; import com.google.api.ads.adwords.axis.v201809.cm.CampaignReturnValue; import com.google.api.ads.adwords.axis.v201809.cm.CampaignServiceInterface; import com.google.api.ads.adwords.axis.v201809.cm.EfficiencyTargetType; import com.google.api.ads.adwords.axis.v201809.cm.Money; import com.google.api.ads.adwords.axis.v201809.cm.Operator; import com.google.api.ads.adwords.axis.v201809.cm.PerformanceTarget; import com.google.api.ads.adwords.axis.v201809.cm.SpendTargetType; import com.google.api.ads.adwords.axis.v201809.cm.VolumeGoalType; import com.google.api.ads.adwords.lib.client.AdWordsSession; import com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface; import com.google.api.ads.adwords.lib.utils.examples.ArgumentNames; import com.google.api.ads.common.lib.auth.OfflineCredentials; import com.google.api.ads.common.lib.auth.OfflineCredentials.Api; import com.google.api.ads.common.lib.conf.ConfigurationLoadException; import com.google.api.ads.common.lib.exception.OAuthException; import com.google.api.ads.common.lib.exception.ValidationException; import com.google.api.ads.common.lib.utils.examples.CodeSampleParams; import com.google.api.client.auth.oauth2.Credential; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.joda.time.DateTime; /** * This code example adds a campaign group and sets a performance target for that group. To get * campaigns, run GetCampaigns.java. To download reports, run DownloadCriteriaReportWithAwql.java. */ public class AddCampaignGroupsAndPerformanceTargets { private static class AddCampaignGroupsAndPerformanceTargetsParams extends CodeSampleParams { @Parameter(names = ArgumentNames.CAMPAIGN_ID, required = true) private List<Long> campaignIds; } public static void main(String[] args) { AdWordsSession session; try { // Generate a refreshable OAuth2 credential. 
Credential oAuth2Credential = new OfflineCredentials.Builder() .forApi(Api.ADWORDS) .fromFile() .build() .generateCredential(); // Construct an AdWordsSession. session = new AdWordsSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build(); } catch (ConfigurationLoadException cle) { System.err.printf( "Failed to load configuration from the %s file. Exception: %s%n", DEFAULT_CONFIGURATION_FILENAME, cle); return; } catch (ValidationException ve) { System.err.printf( "Invalid configuration in the %s file. Exception: %s%n", DEFAULT_CONFIGURATION_FILENAME, ve); return; } catch (OAuthException oe) { System.err.printf( "Failed to create OAuth credentials. Check OAuth settings in the %s file. " + "Exception: %s%n", DEFAULT_CONFIGURATION_FILENAME, oe); return; } AdWordsServicesInterface adWordsServices = AdWordsServices.getInstance(); AddCampaignGroupsAndPerformanceTargetsParams params = new AddCampaignGroupsAndPerformanceTargetsParams(); if (!params.parseArguments(args)) { // Either pass the required parameters for this example on the command line, or insert them // into the code here. See the parameter class definition above for descriptions. params.campaignIds = Arrays.asList( Long.valueOf("INSERT_CAMPAIGN_ID_HERE"), Long.valueOf("INSERT_CAMPAIGN_ID_HERE")); } try { runExample(adWordsServices, session, params.campaignIds); } catch (ApiException apiException) { // ApiException is the base class for most exceptions thrown by an API request. Instances // of this exception have a message and a collection of ApiErrors that indicate the // type and underlying cause of the exception. Every exception object in the adwords.axis // packages will return a meaningful value from toString // // ApiException extends RemoteException, so this catch block must appear before the // catch block for RemoteException. System.err.println("Request failed due to ApiException. Underlying ApiErrors:"); if (apiException.getErrors() != null) { int i = 0; for (ApiError apiError : apiException.getErrors()) { System.err.printf(" Error %d: %s%n", i++, apiError); } } } catch (RemoteException re) { System.err.printf( "Request failed unexpectedly due to RemoteException: %s%n", re); } } /** * Runs the example. * * @param adWordsServices the services factory. * @param session the session. * @param campaignIds the IDs of the campaigns to add to the campaign group. * @throws ApiException if the API request failed with one or more service errors. * @throws RemoteException if the API request failed due to other errors. */ public static void runExample( AdWordsServicesInterface adWordsServices, AdWordsSession session, List<Long> campaignIds) throws RemoteException { CampaignGroup campaignGroup = createCampaignGroup(adWordsServices, session); addCampaignsToGroup(adWordsServices, session, campaignGroup, campaignIds); createPerformanceTarget(adWordsServices, session, campaignGroup); } /** Creates a campaign group. */ private static CampaignGroup createCampaignGroup( AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException { // Get the CampaignGroupService. CampaignGroupServiceInterface campaignGroupService = adWordsServices.get(session, CampaignGroupServiceInterface.class); // Create the campaign group. CampaignGroup campaignGroup = new CampaignGroup(); campaignGroup.setName("Mars campaign group #" + System.currentTimeMillis()); // Create the operation. 
    CampaignGroupOperation operation = new CampaignGroupOperation();
    operation.setOperand(campaignGroup);
    operation.setOperator(Operator.ADD);

    CampaignGroup newCampaignGroup =
        campaignGroupService.mutate(new CampaignGroupOperation[] {operation}).getValue(0);
    System.out.printf(
        "Campaign group with ID %d and name '%s' was created.%n",
        newCampaignGroup.getId(), newCampaignGroup.getName());
    return newCampaignGroup;
  }

  /** Adds multiple campaigns to a campaign group. */
  private static void addCampaignsToGroup(
      AdWordsServicesInterface adWordsServices,
      AdWordsSession session,
      CampaignGroup campaignGroup,
      List<Long> campaignIds)
      throws ApiException, RemoteException {
    // Get the CampaignService.
    CampaignServiceInterface campaignService =
        adWordsServices.get(session, CampaignServiceInterface.class);

    List<CampaignOperation> operations = new ArrayList<>();
    for (Long campaignId : campaignIds) {
      Campaign campaign = new Campaign();
      campaign.setId(campaignId);
      campaign.setCampaignGroupId(campaignGroup.getId());

      CampaignOperation operation = new CampaignOperation();
      operation.setOperand(campaign);
      operation.setOperator(Operator.SET);
      operations.add(operation);
    }

    CampaignReturnValue returnValue =
        campaignService.mutate(operations.toArray(new CampaignOperation[operations.size()]));
    System.out.printf(
        "The following campaign IDs were added to the campaign group with ID %d:%n",
        campaignGroup.getId());
    for (Campaign campaign : returnValue.getValue()) {
      System.out.printf("\t%d%n", campaign.getId());
    }
  }

  /** Creates a performance target for the campaign group. */
  private static void createPerformanceTarget(
      AdWordsServicesInterface adWordsServices, AdWordsSession session, CampaignGroup campaignGroup)
      throws ApiException, RemoteException {
    // Get the CampaignGroupPerformanceTargetService.
    CampaignGroupPerformanceTargetServiceInterface campaignGroupPerformanceTargetService =
        adWordsServices.get(session, CampaignGroupPerformanceTargetServiceInterface.class);

    // Create the performance target.
    CampaignGroupPerformanceTarget campaignGroupPerformanceTarget =
        new CampaignGroupPerformanceTarget();
    campaignGroupPerformanceTarget.setCampaignGroupId(campaignGroup.getId());

    PerformanceTarget performanceTarget = new PerformanceTarget();
    // Keep the CPC for the campaigns at or below $3 (3,000,000 micros).
    performanceTarget.setEfficiencyTargetType(EfficiencyTargetType.CPC_LESS_THAN_OR_EQUAL_TO);
    performanceTarget.setEfficiencyTargetValue(3000000d);

    // Keep the maximum spend under $500 (500,000,000 micros).
    performanceTarget.setSpendTargetType(SpendTargetType.MAXIMUM);
    Money maxSpend = new Money();
    maxSpend.setMicroAmount(500000000L);
    performanceTarget.setSpendTarget(maxSpend);

    // Aim for at least 3000 clicks.
    performanceTarget.setVolumeTargetValue(3000L);
    performanceTarget.setVolumeGoalType(VolumeGoalType.MAXIMIZE_CLICKS);

    // Start the performance target today, and run it for the next 90 days.
    DateTime startDate = DateTime.now();
    DateTime endDate = DateTime.now().plusDays(90);
    performanceTarget.setStartDate(startDate.toString("yyyyMMdd"));
    performanceTarget.setEndDate(endDate.toString("yyyyMMdd"));

    campaignGroupPerformanceTarget.setPerformanceTarget(performanceTarget);

    // Create the operation.
CampaignGroupPerformanceTargetOperation operation = new CampaignGroupPerformanceTargetOperation(); operation.setOperand(campaignGroupPerformanceTarget); operation.setOperator(Operator.ADD); CampaignGroupPerformanceTarget newCampaignGroupPerformanceTarget = campaignGroupPerformanceTargetService .mutate(new CampaignGroupPerformanceTargetOperation[] {operation}) .getValue(0); // Display the results. System.out.printf( "Campaign group performance target with ID %d was added for campaign group ID %d.%n", newCampaignGroupPerformanceTarget.getId(), newCampaignGroupPerformanceTarget.getCampaignGroupId()); } }
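// The AdWords API expresses money in "micros" (1,000,000 micros = 1 unit of currency),
// which is why the performance target above uses values like 3000000 and 500000000.
// A minimal sketch of that conversion; the helper class is illustrative, not part of
// the client library.
final class MicroAmountSketch {

  private static final long MICROS_PER_UNIT = 1_000_000L;

  private MicroAmountSketch() {}

  /** Converts a whole-currency amount (e.g. dollars) to micros. */
  static long toMicros(long wholeUnits) {
    return wholeUnits * MICROS_PER_UNIT;
  }

  public static void main(String[] args) {
    System.out.println(toMicros(3));   // 3000000   -> the $3 CPC ceiling above
    System.out.println(toMicros(500)); // 500000000 -> the $500 spend cap above
  }
}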
package org.apache.maven.plugin.war.packaging;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.File;
import java.io.IOException;

import org.apache.maven.model.Resource;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.war.Overlay;
import org.apache.maven.plugin.war.util.PathSet;
import org.apache.maven.shared.filtering.MavenFilteringException;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.StringUtils;

/**
 * Handles the project's own resources, that is:
 * <ul>
 * <li>The list of web resources, if any</li>
 * <li>The content of the webapp directory if it exists</li>
 * <li>The custom deployment descriptor(s), if any</li>
 * <li>The content of the classes directory if it exists</li>
 * <li>The dependencies of the project</li>
 * </ul>
 *
 * @author Stephane Nicoll
 * @version $Id$
 */
public class WarProjectPackagingTask
    extends AbstractWarPackagingTask
{
    private final Resource[] webResources;

    private final File webXml;

    private final File containerConfigXML;

    private final String id;

    private Overlay currentProjectOverlay;

    /**
     * @param webResources {@link #webResources}
     * @param webXml {@link #webXml}
     * @param containerConfigXml {@link #containerConfigXML}
     * @param currentProjectOverlay {@link #currentProjectOverlay}
     */
    public WarProjectPackagingTask( Resource[] webResources, File webXml, File containerConfigXml,
                                    Overlay currentProjectOverlay )
    {
        if ( webResources != null )
        {
            this.webResources = webResources;
        }
        else
        {
            this.webResources = new Resource[0];
        }
        this.webXml = webXml;
        this.containerConfigXML = containerConfigXml;
        this.currentProjectOverlay = currentProjectOverlay;
        this.id = currentProjectOverlay.getId();
    }

    /**
     * {@inheritDoc}
     */
    public void performPackaging( WarPackagingContext context )
        throws MojoExecutionException, MojoFailureException
    {
        context.getLog().info( "Processing war project" );

        // Prepare the INF directories
        File webinfDir = new File( context.getWebappDirectory(), WEB_INF_PATH );
        webinfDir.mkdirs();
        File metainfDir = new File( context.getWebappDirectory(), META_INF_PATH );
        metainfDir.mkdirs();

        handleWebResources( context );

        handleWebAppSourceDirectory( context );

        // Debug mode: dump the path set for the current build
        PathSet pathSet = context.getWebappStructure().getStructure( "currentBuild" );
        context.getLog().debug( "Dump of the current build pathSet content -->" );
        for ( String path : pathSet )
        {
            context.getLog().debug( path );
        }
        context.getLog().debug( "-- end of dump --" );

        handleDeploymentDescriptors( context, webinfDir, metainfDir );

        handleClassesDirectory( context );

        handleArtifacts( context );
    }

    /**
     * Handles the web resources.
     *
     * @param context the packaging context
     * @throws MojoExecutionException if a resource could not be copied
     */
    protected void handleWebResources( WarPackagingContext context )
        throws MojoExecutionException
    {
        for ( Resource resource : webResources )
        {
            // MWAR-246
            if ( resource.getDirectory() == null )
            {
                throw new MojoExecutionException( "The <directory> tag is missing from the <resource> tag." );
            }

            if ( !( new File( resource.getDirectory() ) ).isAbsolute() )
            {
                resource.setDirectory( context.getProject().getBasedir() + File.separator + resource.getDirectory() );
            }

            // Make sure that the resource directory is not the same as the webappDirectory
            if ( !resource.getDirectory().equals( context.getWebappDirectory().getPath() ) )
            {
                try
                {
                    copyResources( context, resource );
                }
                catch ( IOException e )
                {
                    throw new MojoExecutionException( "Could not copy resource [" + resource.getDirectory() + "]", e );
                }
            }
        }
    }

    /**
     * Handles the webapp sources.
     *
     * @param context the packaging context
     * @throws MojoExecutionException if the sources could not be copied
     */
    protected void handleWebAppSourceDirectory( WarPackagingContext context )
        throws MojoExecutionException
    {
        // CHECKSTYLE_OFF: LineLength
        if ( !context.getWebappSourceDirectory().exists() )
        {
            context.getLog().debug( "webapp sources directory does not exist - skipping." );
        }
        else if ( !context.getWebappSourceDirectory().getAbsolutePath().equals( context.getWebappDirectory().getPath() ) )
        {
            context.getLog().info( "Copying webapp resources [" + context.getWebappSourceDirectory() + "]" );
            final PathSet sources =
                getFilesToIncludes( context.getWebappSourceDirectory(), context.getWebappSourceIncludes(),
                                    context.getWebappSourceExcludes(),
                                    context.isWebappSourceIncludeEmptyDirectories() );

            try
            {
                copyFiles( id, context, context.getWebappSourceDirectory(), sources, false );
            }
            catch ( IOException e )
            {
                throw new MojoExecutionException( "Could not copy webapp sources ["
                    + context.getWebappDirectory().getAbsolutePath() + "]", e );
            }
        }
        // CHECKSTYLE_ON: LineLength
    }

    /**
     * Handles the webapp artifacts.
     *
     * @param context the packaging context
     * @throws MojoExecutionException if the artifacts could not be packaged
     */
    protected void handleArtifacts( WarPackagingContext context )
        throws MojoExecutionException
    {
        @SuppressWarnings( "unchecked" )
        ArtifactsPackagingTask task =
            new ArtifactsPackagingTask( context.getProject().getArtifacts(), currentProjectOverlay );
        task.performPackaging( context );
    }

    /**
     * Handles the webapp classes.
     *
     * @param context the packaging context
     * @throws MojoExecutionException if the classes could not be packaged
     */
    protected void handleClassesDirectory( WarPackagingContext context )
        throws MojoExecutionException
    {
        ClassesPackagingTask task = new ClassesPackagingTask( currentProjectOverlay );
        task.performPackaging( context );
    }

    /**
     * Handles the deployment descriptors, if specified. Note that the behavior here is slightly different since the
     * customized entry always wins, even if an overlay has already packaged a web.xml previously.
     *
     * @param context the packaging context
     * @param webinfDir the web-inf directory
     * @param metainfDir the meta-inf directory
     * @throws MojoFailureException if the web.xml is specified but does not exist
     * @throws MojoExecutionException if an error occurred while copying the descriptors
     */
    protected void handleDeploymentDescriptors( WarPackagingContext context, File webinfDir, File metainfDir )
        throws MojoFailureException, MojoExecutionException
    {
        try
        {
            if ( webXml != null && StringUtils.isNotEmpty( webXml.getName() ) )
            {
                if ( !webXml.exists() )
                {
                    throw new MojoFailureException( "The specified web.xml file '" + webXml + "' does not exist" );
                }

                // Make sure that it won't get overlaid
                context.getWebappStructure().registerFileForced( id, WEB_INF_PATH + "/web.xml" );

                if ( context.isFilteringDeploymentDescriptors() )
                {
                    context.getMavenFileFilter().copyFile( webXml, new File( webinfDir, "web.xml" ), true,
                                                           context.getFilterWrappers(), getEncoding( webXml ) );
                }
                else
                {
                    copyFile( context, webXml, new File( webinfDir, "web.xml" ), "WEB-INF/web.xml", true );
                }
            }
            else
            {
                // the webXml can be the default one
                File defaultWebXml = new File( context.getWebappSourceDirectory(), WEB_INF_PATH + "/web.xml" );
                // if it exists, we can filter it
                if ( defaultWebXml.exists() && context.isFilteringDeploymentDescriptors() )
                {
                    context.getWebappStructure().registerFile( id, WEB_INF_PATH + "/web.xml" );
                    context.getMavenFileFilter().copyFile( defaultWebXml, new File( webinfDir, "web.xml" ), true,
                                                           context.getFilterWrappers(), getEncoding( defaultWebXml ) );
                }
            }

            if ( containerConfigXML != null && StringUtils.isNotEmpty( containerConfigXML.getName() ) )
            {
                String xmlFileName = containerConfigXML.getName();

                context.getWebappStructure().registerFileForced( id, META_INF_PATH + "/" + xmlFileName );

                if ( context.isFilteringDeploymentDescriptors() )
                {
                    context.getMavenFileFilter().copyFile( containerConfigXML, new File( metainfDir, xmlFileName ),
                                                           true, context.getFilterWrappers(),
                                                           getEncoding( containerConfigXML ) );
                }
                else
                {
                    copyFile( context, containerConfigXML, new File( metainfDir, xmlFileName ),
                              "META-INF/" + xmlFileName, true );
                }
            }
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Failed to copy deployment descriptor", e );
        }
        catch ( MavenFilteringException e )
        {
            throw new MojoExecutionException( "Failed to copy deployment descriptor", e );
        }
    }

    /**
     * Copies webapp webResources from the specified directory.
     *
     * @param context the WAR packaging context to use
     * @param resource the resource to copy
     * @throws IOException if an error occurred while copying the resources
     * @throws MojoExecutionException if an error occurred while retrieving the filter properties
     */
    public void copyResources( WarPackagingContext context, Resource resource )
        throws IOException, MojoExecutionException
    {
        if ( !context.getWebappDirectory().exists() )
        {
            context.getLog().warn( "Not copying webapp webResources [" + resource.getDirectory()
                + "]: webapp directory [" + context.getWebappDirectory().getAbsolutePath()
                + "] does not exist!" );
        }
        context.getLog().info( "Copying webapp webResources [" + resource.getDirectory() + "] to ["
            + context.getWebappDirectory().getAbsolutePath() + "]" );

        String[] fileNames = getFilesToCopy( resource );
        for ( String fileName : fileNames )
        {
            String targetFileName = fileName;
            if ( resource.getTargetPath() != null )
            {
                // TODO make sure this thing is 100% safe
                // MWAR-129: if targetPath is only a dot (<targetPath>.</targetPath>) or "./",
                // and the resource lives inside the warSourceDirectory, the file from the webapp
                // sources would override it anyway, so the targetPath must not be prepended here.
                // Not elegant, but it works.
                if ( !StringUtils.equals( ".", resource.getTargetPath() )
                    && !StringUtils.equals( "./", resource.getTargetPath() ) )
                {
                    targetFileName = resource.getTargetPath() + File.separator + targetFileName;
                }
            }
            if ( resource.isFiltering() && !context.isNonFilteredExtension( fileName ) )
            {
                copyFilteredFile( id, context, new File( resource.getDirectory(), fileName ), targetFileName );
            }
            else
            {
                copyFile( id, context, new File( resource.getDirectory(), fileName ), targetFileName );
            }
        }
    }

    /**
     * Returns a list of filenames that should be copied over to the destination directory.
     *
     * @param resource the resource to be scanned
     * @return the array of filenames, relative to the sourceDir
     */
    private String[] getFilesToCopy( Resource resource )
    {
        // CHECKSTYLE_OFF: LineLength
        DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir( resource.getDirectory() );
        if ( resource.getIncludes() != null && !resource.getIncludes().isEmpty() )
        {
            scanner.setIncludes( (String[]) resource.getIncludes().toArray( new String[resource.getIncludes().size()] ) );
        }
        else
        {
            scanner.setIncludes( DEFAULT_INCLUDES );
        }
        if ( resource.getExcludes() != null && !resource.getExcludes().isEmpty() )
        {
            scanner.setExcludes( (String[]) resource.getExcludes().toArray( new String[resource.getExcludes().size()] ) );
        }

        scanner.addDefaultExcludes();
        scanner.scan();

        return scanner.getIncludedFiles();
        // CHECKSTYLE_ON: LineLength
    }
}
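// A minimal standalone sketch of the include/exclude scan that getFilesToCopy(...)
// performs above, using the same plexus-utils DirectoryScanner API. The directory and
// patterns are hypothetical placeholders.
class DirectoryScannerSketch
{
    public static void main( String[] args )
    {
        org.codehaus.plexus.util.DirectoryScanner scanner =
            new org.codehaus.plexus.util.DirectoryScanner();
        scanner.setBasedir( "src/main/webapp" );                    // resource directory
        scanner.setIncludes( new String[] { "**/*.jsp" } );         // explicit includes
        scanner.setExcludes( new String[] { "**/internal/**" } );   // explicit excludes
        scanner.addDefaultExcludes();                               // skip SCM metadata etc.
        scanner.scan();
        for ( String relativePath : scanner.getIncludedFiles() )
        {
            System.out.println( relativePath );                     // paths relative to the basedir
        }
    }
}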
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1beta1/policytagmanager.proto package com.google.cloud.datacatalog.v1beta1; /** * * * <pre> * Request message for * [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.GetPolicyTagRequest} */ public final class GetPolicyTagRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) GetPolicyTagRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetPolicyTagRequest.newBuilder() to construct. private GetPolicyTagRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetPolicyTagRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetPolicyTagRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetPolicyTagRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1beta1_GetPolicyTagRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1beta1_GetPolicyTagRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.class, com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * * * <pre> * Required. 
Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest)) { return super.equals(obj); } com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest other = (com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) obj; if (!getName().equals(other.getName())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.GetPolicyTagRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1beta1_GetPolicyTagRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1beta1_GetPolicyTagRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.class, com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.Builder.class); } // Construct using com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); name_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1beta1_GetPolicyTagRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest build() { com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest buildPartial() { com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest result = new com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest(this); result.name_ = name_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) { return mergeFrom((com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest other) { if (other == com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object name_ = ""; /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * * * <pre> * Required. Resource name of the requested policy tag. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetPolicyTagRequest) private static final com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest(); } public static com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetPolicyTagRequest> PARSER = new com.google.protobuf.AbstractParser<GetPolicyTagRequest>() { @java.lang.Override public GetPolicyTagRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetPolicyTagRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<GetPolicyTagRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetPolicyTagRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
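// A minimal usage sketch of the generated request above; the policy tag resource name
// is a hypothetical placeholder following the documented pattern.
class GetPolicyTagRequestUsageSketch {
  public static void main(String[] args) throws Exception {
    com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest request =
        com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.newBuilder()
            .setName("projects/my-project/locations/us/taxonomies/123/policyTags/456")
            .build();

    // The generated parser and the serialized form round-trip losslessly.
    com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest parsed =
        com.google.cloud.datacatalog.v1beta1.GetPolicyTagRequest.parseFrom(request.toByteArray());
    System.out.println(parsed.getName());
  }
}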
/* * Copyright (c) 2008, Richard Wallace. All Rights Reserved. * Copyright (c) 2011, Paul Merlin. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package org.qi4j.library.http; import java.util.Collections; import java.util.EnumSet; import java.util.Map; import java.util.UUID; import javax.servlet.DispatcherType; import javax.servlet.Filter; import javax.servlet.Servlet; import javax.servlet.ServletContextListener; import org.qi4j.api.service.ServiceComposite; import org.qi4j.bootstrap.AssemblyException; import org.qi4j.bootstrap.ModuleAssembly; import org.qi4j.library.http.ConstraintInfo.Constraint; import org.qi4j.library.http.ConstraintInfo.HttpMethod; import org.qi4j.library.http.Dispatchers.Dispatcher; import static org.qi4j.api.common.Visibility.layer; public final class Servlets { private Servlets() { } public static ContextListenerDeclaration listen() { return new ContextListenerDeclaration(); } public static ContextListenerAssembler addContextListeners( ContextListenerDeclaration... contextListenerDeclarations ) { return new ContextListenerAssembler( contextListenerDeclarations ); } public static class ContextListenerAssembler { final ContextListenerDeclaration[] contextListenerDeclarations; ContextListenerAssembler( ContextListenerDeclaration... eventListenerDeclarations ) { this.contextListenerDeclarations = eventListenerDeclarations; } public void to( ModuleAssembly module ) throws AssemblyException { for ( ContextListenerDeclaration contextListenerDeclaration : contextListenerDeclarations ) { module.services( contextListenerDeclaration.contextListener() ). setMetaInfo( contextListenerDeclaration.contextListenerInfo() ). instantiateOnStartup().visibleIn( layer ); } } } public static class ContextListenerDeclaration { Class<? extends ServiceComposite> contextListener; Map<String, String> initParams = Collections.emptyMap(); public <T extends ServletContextListener & ServiceComposite> ContextListenerDeclaration with( Class<T> contextListener ) { this.contextListener = contextListener; return this; } public Class<? extends ServiceComposite> contextListener() { return contextListener; } public ContextListenerDeclaration withInitParams( Map<String, String> initParams ) { this.initParams = initParams; return this; } private ContextListenerInfo contextListenerInfo() { return new ContextListenerInfo( initParams ); } } public static ServletDeclaration serve( String path ) { return new ServletDeclaration( path ); } public static ServletAssembler addServlets( ServletDeclaration... servletDeclarations ) { return new ServletAssembler( servletDeclarations ); } public static class ServletAssembler { final ServletDeclaration[] servletDeclarations; ServletAssembler( ServletDeclaration... servletDeclarations ) { this.servletDeclarations = servletDeclarations; } public void to( ModuleAssembly module ) throws AssemblyException { for ( ServletDeclaration servletDeclaration : servletDeclarations ) { module.services( servletDeclaration.servlet() ). 
setMetaInfo( servletDeclaration.servletInfo() ). instantiateOnStartup().visibleIn( layer ); } } } public static class ServletDeclaration { String path; Class<? extends ServiceComposite> servlet; Map<String, String> initParams = Collections.emptyMap(); ServletDeclaration( String path ) { this.path = path; } public <T extends Servlet & ServiceComposite> ServletDeclaration with( Class<T> servlet ) { this.servlet = servlet; return this; } public ServletDeclaration withInitParams( Map<String, String> initParams ) { this.initParams = initParams; return this; } Class<? extends ServiceComposite> servlet() { return servlet; } ServletInfo servletInfo() { return new ServletInfo( path, initParams ); } } public static FilterAssembler filter( String path ) { return new FilterAssembler( path ); } public static FilterDeclaration addFilters( FilterAssembler... filterAssemblers ) { return new FilterDeclaration( filterAssemblers ); } public static class FilterDeclaration { final FilterAssembler[] filterAssemblers; FilterDeclaration( FilterAssembler... filterAssemblers ) { this.filterAssemblers = filterAssemblers; } @SuppressWarnings( "unchecked" ) public void to( ModuleAssembly module ) throws AssemblyException { for ( FilterAssembler filterAssembler : filterAssemblers ) { module.services( filterAssembler.filter() ). setMetaInfo( filterAssembler.filterInfo() ). instantiateOnStartup().visibleIn( layer ); } } } public static class FilterAssembler { String path; Class<? extends ServiceComposite> filter; EnumSet<DispatcherType> dispatchers; Map<String, String> initParams = Collections.emptyMap(); FilterAssembler( String path ) { this.path = path; } public <T extends Filter & ServiceComposite> FilterAssembler through( Class<T> filter ) { this.filter = filter; return this; } public FilterAssembler on( DispatcherType first, DispatcherType... rest ) { dispatchers = EnumSet.of( first, rest ); return this; } @Deprecated public FilterAssembler on( Dispatcher first, Dispatcher... rest ) { EnumSet<DispatcherType> dispatch = EnumSet.noneOf( DispatcherType.class ); for ( Dispatcher each : Dispatchers.dispatchers( first, rest ) ) { switch ( each ) { case FORWARD: dispatch.add( DispatcherType.FORWARD ); break; case REQUEST: dispatch.add( DispatcherType.REQUEST ); break; } } dispatchers = dispatch; return this; } public FilterAssembler withInitParams( Map<String, String> initParams ) { this.initParams = initParams; return this; } Class<? extends ServiceComposite> filter() { return filter; } FilterInfo filterInfo() { return new FilterInfo( path, initParams, dispatchers ); } } public static ConstraintAssembler constrain( String path ) { return new ConstraintAssembler( path ); } public static ConstraintDeclaration addConstraints( ConstraintAssembler... 
constraintAssemblers ) { return new ConstraintDeclaration( constraintAssemblers ); } public static class ConstraintDeclaration { private final ConstraintAssembler[] constraintAssemblers; private ConstraintDeclaration( ConstraintAssembler[] constraintAssemblers ) { this.constraintAssemblers = constraintAssemblers; } public void to( ModuleAssembly module ) throws AssemblyException { // TODO Refactor adding Map<ServiceAssembly,T> ServiceDeclaration.getMetaInfos( Class<T> type ); in bootstrap & runtime // This would allow removing the ConstraintServices instances and this horrible hack with random UUIDs for ( ConstraintAssembler eachAssembler : constraintAssemblers ) { module.addServices( ConstraintService.class ).identifiedBy( UUID.randomUUID().toString() ).setMetaInfo( eachAssembler.constraintInfo() ); } } } public static class ConstraintAssembler { private final String path; private Constraint constraint; private HttpMethod[] omittedHttpMethods = new HttpMethod[]{}; private ConstraintAssembler( String path ) { this.path = path; } public ConstraintAssembler by( Constraint constraint ) { this.constraint = constraint; return this; } public ConstraintAssembler butNotOn( HttpMethod... omittedHttpMethods ) { this.omittedHttpMethods = omittedHttpMethods; return this; } ConstraintInfo constraintInfo() { return new ConstraintInfo( path, constraint, omittedHttpMethods ); } } }
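import static org.qi4j.library.http.Servlets.*;

import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.Servlet;
import org.qi4j.api.service.ServiceComposite;
import org.qi4j.bootstrap.Assembler;
import org.qi4j.bootstrap.AssemblyException;
import org.qi4j.bootstrap.ModuleAssembly;

// A minimal sketch of the fluent assembly DSL defined above, as it would be used from
// an Assembler. The two nested composite interfaces are hypothetical placeholders (in
// a real application they would be backed by mixins implementing the servlet logic).
public class ServletsAssemblySketch
    implements Assembler
{
    // Qi4j composites are declared as interfaces; these satisfy the
    // <T extends Servlet & ServiceComposite> bounds used by the DSL above.
    interface MyServletService extends Servlet, ServiceComposite {}

    interface MyFilterService extends Filter, ServiceComposite {}

    @Override
    public void assemble( ModuleAssembly module )
        throws AssemblyException
    {
        addServlets( serve( "/hello" ).with( MyServletService.class ) ).to( module );
        addFilters( filter( "/*" ).through( MyFilterService.class )
                                  .on( DispatcherType.REQUEST ) ).to( module );
    }
}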
/** * Copyright 2014 Confluent Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.confluent.kafka.formatter; import org.apache.avro.AvroRuntimeException; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.io.DatumReader; import org.apache.avro.io.DecoderFactory; import org.apache.avro.util.Utf8; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.config.ConfigException; import org.apache.kafka.common.errors.SerializationException; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; import kafka.common.KafkaException; import kafka.producer.KeyedMessage; import kafka.common.MessageReader; import io.confluent.kafka.serializers.AbstractKafkaAvroSerializer; /** * Example * To use AvroMessageReader, first make sure that Zookeeper, Kafka and schema registry server are * all started. Second, make sure the jar for AvroMessageReader and its dependencies are included * in the classpath of kafka-console-producer.sh. Then run the following * command. * * 1. Send Avro string as value. (make sure there is no space in the schema string) * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \ * --line-reader io.confluent.kafka.formatter.AvroMessageReader \ * --property schema.registry.url=http://localhost:8081 \ * --property value.schema='{"type":"string"}' * * In the shell, type in the following. * "a" * "b" * * 2. Send Avro record as value. * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \ * --line-reader io.confluent.kafka.formatter.AvroMessageReader \ * --property schema.registry.url=http://localhost:8081 \ * --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}' * * In the shell, type in the following. * {"f1": "value1"} * * 3. Send Avro string as key and Avro record as value. * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \ * --line-reader io.confluent.kafka.formatter.AvroMessageReader \ * --property schema.registry.url=http://localhost:8081 \ * --property parse.key=true \ * --property key.schema='{"type":"string"}' \ * --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}' * * In the shell, type in the following. 
* "key1" \t {"f1": "value1"} * */ public class AvroMessageReader extends AbstractKafkaAvroSerializer implements MessageReader { private String topic = null; private BufferedReader reader = null; private Boolean parseKey = false; private String keySeparator = "\t"; private boolean ignoreError = false; private final DecoderFactory decoderFactory = DecoderFactory.get(); private Schema keySchema = null; private Schema valueSchema = null; private String keySubject = null; private String valueSubject = null; /** * Constructor needed by kafka console producer. */ public AvroMessageReader() { } /** * For testing only. */ AvroMessageReader(SchemaRegistryClient schemaRegistryClient, Schema keySchema, Schema valueSchema, String topic, boolean parseKey, BufferedReader reader) { this.schemaRegistry = schemaRegistryClient; this.keySchema = keySchema; this.valueSchema = valueSchema; this.topic = topic; this.keySubject = topic + "-key"; this.valueSubject = topic + "-value"; this.parseKey = parseKey; this.reader = reader; } @Override public void init(java.io.InputStream inputStream, java.util.Properties props) { topic = props.getProperty("topic"); if (props.containsKey("parse.key")) { parseKey = props.getProperty("parse.key").trim().toLowerCase().equals("true"); } if (props.containsKey("key.separator")) { keySeparator = props.getProperty("key.separator"); } if (props.containsKey("ignore.error")) { ignoreError = props.getProperty("ignore.error").trim().toLowerCase().equals("true"); } reader = new BufferedReader(new InputStreamReader(inputStream)); String url = props.getProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG); if (url == null) { throw new ConfigException("Missing schema registry url!"); } schemaRegistry = new CachedSchemaRegistryClient( url, AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT); if (!props.containsKey("value.schema")) { throw new ConfigException("Must provide the Avro schema string in value.schema"); } String valueSchemaString = props.getProperty("value.schema"); Schema.Parser parser = new Schema.Parser(); valueSchema = parser.parse(valueSchemaString); if (parseKey) { if (!props.containsKey("key.schema")) { throw new ConfigException("Must provide the Avro schema string in key.schema"); } String keySchemaString = props.getProperty("key.schema"); keySchema = parser.parse(keySchemaString); } keySubject = topic + "-key"; valueSubject = topic + "-value"; } @Override public ProducerRecord<byte[], byte[]> readMessage() { try { String line = reader.readLine(); if (line == null) { return null; } if (!parseKey) { Object value = jsonToAvro(line, valueSchema); byte[] serializedValue = serializeImpl(valueSubject, value); return new ProducerRecord<>(topic, serializedValue); } else { int keyIndex = line.indexOf(keySeparator); if (keyIndex < 0) { if (ignoreError) { Object value = jsonToAvro(line, valueSchema); byte[] serializedValue = serializeImpl(valueSubject, value); return new ProducerRecord<>(topic, serializedValue); } else { throw new KafkaException("No key found in line " + line); } } else { String keyString = line.substring(0, keyIndex); String valueString = (keyIndex + keySeparator.length() > line.length()) ? 
"" : line.substring(keyIndex + keySeparator.length()); Object key = jsonToAvro(keyString, keySchema); byte[] serializedKey = serializeImpl(keySubject, key); Object value = jsonToAvro(valueString, valueSchema); byte[] serializedValue = serializeImpl(valueSubject, value); return new ProducerRecord<>(topic, serializedKey, serializedValue); } } } catch (IOException e) { throw new KafkaException("Error reading from input", e); } } private Object jsonToAvro(String jsonString, Schema schema) { try { DatumReader<Object> reader = new GenericDatumReader<Object>(schema); Object object = reader.read(null, decoderFactory.jsonDecoder(schema, jsonString)); if (schema.getType().equals(Schema.Type.STRING)) { object = ((Utf8) object).toString(); } return object; } catch (IOException e) { throw new SerializationException( String.format("Error deserializing json %s to Avro of schema %s", jsonString, schema), e); } catch (AvroRuntimeException e) { throw new SerializationException( String.format("Error deserializing json %s to Avro of schema %s", jsonString, schema), e); } } @Override public void close() { // nothing to do } }
/* * Copyright 2011-2012 Amazon Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://aws.amazon.com/apache2.0 * * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES * OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and * limitations under the License. */ package com.amazonaws.eclipse.explorer.sqs; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.ALL; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.ARN; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.CREATED; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.DELAY_SECONDS; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.MAX_MESSAGE_SIZE; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.NUMBER_OF_MESSAGES; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.RETENTION_PERIOD; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.SENDER_ID; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.SENT; import static com.amazonaws.eclipse.explorer.sqs.QueueAttributes.VISIBILITY_TIMEOUT; import java.text.DateFormat; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.layout.GridDataFactory; import org.eclipse.jface.layout.TreeColumnLayout; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.ColumnWeightData; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.jface.viewers.ITableLabelProvider; import org.eclipse.jface.viewers.ITreePathContentProvider; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.TreePath; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.viewers.Viewer; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorSite; import org.eclipse.ui.PartInitException; import org.eclipse.ui.forms.IFormColors; import org.eclipse.ui.forms.widgets.FormToolkit; import org.eclipse.ui.forms.widgets.ScrolledForm; import org.eclipse.ui.part.EditorPart; import com.amazonaws.eclipse.core.AWSClientFactory; import com.amazonaws.eclipse.core.AwsToolkitCore; import com.amazonaws.eclipse.core.ui.IRefreshable; import com.amazonaws.services.sqs.AmazonSQS; import com.amazonaws.services.sqs.model.DeleteMessageRequest; import com.amazonaws.services.sqs.model.GetQueueAttributesRequest; import com.amazonaws.services.sqs.model.Message; import com.amazonaws.services.sqs.model.ReceiveMessageRequest; public class QueueEditor extends EditorPart implements IRefreshable { private QueueEditorInput queueEditorInput; 
    private Label retentionPeriodLabel;
    private Label maxMessageSizeLabel;
    private Label createdLabel;
    private Label visibilityTimeoutLabel;
    private Label queueArnLabel;
    private Label numberOfMessagesLabel;
    private TreeViewer viewer;
    private Label queueDelayLabel;

    @Override public void doSave(IProgressMonitor arg0) {}
    @Override public void doSaveAs() {}
    @Override public void setFocus() {}
    @Override public boolean isDirty() { return false; }
    @Override public boolean isSaveAsAllowed() { return false; }

    @Override
    public void init(IEditorSite site, IEditorInput input) throws PartInitException {
        setSite(site);
        setInput(input);
        setPartName(input.getName());
        queueEditorInput = (QueueEditorInput)input;
    }

    @Override
    public void createPartControl(Composite parent) {
        FormToolkit toolkit = new FormToolkit(Display.getDefault());
        ScrolledForm form = new ScrolledForm(parent, SWT.V_SCROLL);
        form.setExpandHorizontal(true);
        form.setExpandVertical(true);
        form.setBackground(toolkit.getColors().getBackground());
        form.setForeground(toolkit.getColors().getColor(IFormColors.TITLE));
        form.setFont(JFaceResources.getHeaderFont());
        form.setText(queueEditorInput.getName());
        toolkit.decorateFormHeading(form.getForm());
        form.setImage(AwsToolkitCore.getDefault().getImageRegistry().get(AwsToolkitCore.IMAGE_QUEUE));
        form.getBody().setLayout(new GridLayout());

        createQueueSummaryInfoSection(form, toolkit);
        createMessageList(form, toolkit);

        form.getToolBarManager().add(new RefreshAction());
        form.getToolBarManager().add(new Separator());
        form.getToolBarManager().add(new AddMessageAction(getClient(), queueEditorInput.getQueueUrl(), this));
        form.getToolBarManager().update(true);
    }

    private class DeleteMessageAction extends Action {
        public DeleteMessageAction() {
            this.setText("Delete");
            this.setToolTipText("Delete this message from the queue");
            this.setImageDescriptor(AwsToolkitCore.getDefault().getImageRegistry().getDescriptor(AwsToolkitCore.IMAGE_REMOVE));
        }

        @Override
        public boolean isEnabled() {
            return !viewer.getSelection().isEmpty();
        }

        @Override
        public void run() {
            StructuredSelection selection = (StructuredSelection)viewer.getSelection();
            Iterator<Message> iterator = selection.iterator();
            while (iterator.hasNext()) {
                Message message = iterator.next();
                getClient().deleteMessage(new DeleteMessageRequest(queueEditorInput.getQueueUrl(), message.getReceiptHandle()));
            }
            new RefreshAction().run();
        }
    }

    private class RefreshAction extends Action {
        public RefreshAction() {
            this.setText("Refresh");
            this.setToolTipText("Refresh queue message sampling");
            this.setImageDescriptor(AwsToolkitCore.getDefault().getImageRegistry().getDescriptor(AwsToolkitCore.IMAGE_REFRESH));
        }

        @Override
        public void run() {
            new LoadMessagesThread().start();
            new LoadQueueAttributesThread().start();
        }
    }

    private void createQueueSummaryInfoSection(final ScrolledForm form, final FormToolkit toolkit) {
        GridDataFactory gridDataFactory = GridDataFactory.swtDefaults().align(SWT.FILL, SWT.TOP).grab(true, false).minSize(200, SWT.DEFAULT).hint(200, SWT.DEFAULT);

        Composite composite = toolkit.createComposite(form.getBody());
        composite.setLayout(new GridLayout(2, false));
        composite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));

        toolkit.createLabel(composite, "Retention Period:");
        retentionPeriodLabel = toolkit.createLabel(composite, "");
        gridDataFactory.applyTo(retentionPeriodLabel);

        toolkit.createLabel(composite, "Max Message Size:");
        maxMessageSizeLabel = toolkit.createLabel(composite, "");
        gridDataFactory.applyTo(maxMessageSizeLabel);
toolkit.createLabel(composite, "Created:"); createdLabel = toolkit.createLabel(composite, ""); gridDataFactory.applyTo(createdLabel); toolkit.createLabel(composite, "Visibility Timeout:"); visibilityTimeoutLabel = toolkit.createLabel(composite, ""); gridDataFactory.applyTo(visibilityTimeoutLabel); toolkit.createLabel(composite, "Queue ARN:"); queueArnLabel = toolkit.createLabel(composite, ""); gridDataFactory.applyTo(queueArnLabel); toolkit.createLabel(composite, "Approx. Message Count:"); numberOfMessagesLabel = toolkit.createLabel(composite, ""); gridDataFactory.applyTo(numberOfMessagesLabel); toolkit.createLabel(composite, "Message Delay (seconds):"); queueDelayLabel = toolkit.createLabel(composite, ""); gridDataFactory.applyTo(queueDelayLabel); new LoadQueueAttributesThread().start(); } private AmazonSQS getClient() { AWSClientFactory clientFactory = AwsToolkitCore.getClientFactory(queueEditorInput.getAccountId()); return clientFactory.getSQSClientByEndpoint(queueEditorInput.getRegionEndpoint()); } private class LoadQueueAttributesThread extends Thread { @Override public void run() { GetQueueAttributesRequest request = new GetQueueAttributesRequest(queueEditorInput.getQueueUrl()).withAttributeNames(ALL); final Map<String, String> attributes = getClient().getQueueAttributes(request).getAttributes(); Display.getDefault().asyncExec(new Runnable() { @Override public void run() { retentionPeriodLabel.setText(attributes.get(RETENTION_PERIOD)); maxMessageSizeLabel.setText(attributes.get(MAX_MESSAGE_SIZE)); createdLabel.setText(attributes.get(CREATED)); visibilityTimeoutLabel.setText(attributes.get(VISIBILITY_TIMEOUT)); queueArnLabel.setText(attributes.get(ARN)); numberOfMessagesLabel.setText(attributes.get(NUMBER_OF_MESSAGES)); queueDelayLabel.setText(valueOrDefault(attributes.get(DELAY_SECONDS), "0")); numberOfMessagesLabel.getParent().layout(); } }); } private String valueOrDefault(String value, String defaultValue) { if (value != null) return value; else return defaultValue; } } private class LoadMessagesThread extends Thread { @Override public void run() { final Map<String, Message> messagesById = new HashMap<>(); for (int i = 0; i < 5; i++) { ReceiveMessageRequest request = new ReceiveMessageRequest(queueEditorInput.getQueueUrl()).withVisibilityTimeout(0).withMaxNumberOfMessages(10).withAttributeNames(ALL); List<Message> messages = getClient().receiveMessage(request).getMessages(); for (Message message : messages) { messagesById.put(message.getMessageId(), message); } } Display.getDefault().asyncExec(new Runnable() { @Override public void run() { viewer.setInput(messagesById.values()); } }); } } private final class MessageContentProvider implements ITreePathContentProvider { private Message[] messages; @Override public void dispose() {} @Override public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { if (newInput instanceof Collection) { messages = ((Collection<Message>)newInput).toArray(new Message[0]); } else { messages = new Message[0]; } } @Override public Object[] getChildren(TreePath arg0) { return null; } @Override public Object[] getElements(Object arg0) { return messages; } @Override public TreePath[] getParents(Object arg0) { return new TreePath[0]; } @Override public boolean hasChildren(TreePath arg0) { return false; } } private final class MessageLabelProvider implements ITableLabelProvider { private final DateFormat dateFormat; public MessageLabelProvider() { dateFormat = DateFormat.getDateTimeInstance(); } @Override public void 
addListener(ILabelProviderListener arg0) {}
        @Override public void removeListener(ILabelProviderListener arg0) {}
        @Override public void dispose() {}
        @Override public boolean isLabelProperty(Object obj, String column) { return false; }
        @Override public Image getColumnImage(Object obj, int column) { return null; }

        @Override
        public String getColumnText(Object obj, int column) {
            if (!(obj instanceof Message)) return "";

            Message message = (Message)obj;
            Map<String, String> attributes = message.getAttributes();
            switch (column) {
                case 0: return message.getMessageId();
                case 1: return message.getBody();
                case 2: return formatDate(attributes.get(SENT));
                case 3: return attributes.get(SENDER_ID);
            }
            return "";
        }

        private String formatDate(String epochString) {
            if (epochString == null || epochString.trim().length() == 0) return "";
            // The SQS SentTimestamp attribute is epoch time in milliseconds.
            long epochMillis = Long.parseLong(epochString);
            return dateFormat.format(new Date(epochMillis));
        }
    }

    private void createMessageList(final ScrolledForm form, final FormToolkit toolkit) {
        Composite parent = toolkit.createComposite(form.getBody());
        parent.setLayout(new GridLayout());
        parent.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));

        Label label = toolkit.createLabel(parent, "Message Sampling");
        label.setFont(JFaceResources.getHeaderFont());
        label.setForeground(toolkit.getColors().getColor(IFormColors.TITLE));
        label.setLayoutData(new GridData(SWT.FILL, SWT.DEFAULT, true, false));

        Composite composite = toolkit.createComposite(parent);
        composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        TreeColumnLayout tableColumnLayout = new TreeColumnLayout();
        composite.setLayout(tableColumnLayout);

        MessageContentProvider contentProvider = new MessageContentProvider();
        MessageLabelProvider labelProvider = new MessageLabelProvider();

        viewer = new TreeViewer(composite, SWT.BORDER | SWT.MULTI);
        viewer.getTree().setLinesVisible(true);
        viewer.getTree().setHeaderVisible(true);
        viewer.setLabelProvider(labelProvider);
        viewer.setContentProvider(contentProvider);
        createColumns(tableColumnLayout, viewer.getTree());
        viewer.setInput(new Object());

        MenuManager menuManager = new MenuManager();
        menuManager.setRemoveAllWhenShown(true);
        menuManager.addMenuListener(new IMenuListener() {
            @Override
            public void menuAboutToShow(IMenuManager manager) {
                manager.add(new DeleteMessageAction());
            }
        });
        Menu menu = menuManager.createContextMenu(viewer.getTree());
        viewer.getTree().setMenu(menu);
        getSite().registerContextMenu(menuManager, viewer);

        new LoadMessagesThread().start();
    }

    private void createColumns(TreeColumnLayout columnLayout, Tree tree) {
        createColumn(tree, columnLayout, "ID");
        createColumn(tree, columnLayout, "Body");
        createColumn(tree, columnLayout, "Sent");
        createColumn(tree, columnLayout, "Sender");
    }

    private TreeColumn createColumn(Tree tree, TreeColumnLayout columnLayout, String text) {
        TreeColumn column = new TreeColumn(tree, SWT.NONE);
        column.setText(text);
        column.setMoveable(true);
        columnLayout.setColumnData(column, new ColumnWeightData(30));
        return column;
    }

    @Override
    public void refreshData() {
        new RefreshAction().run();
    }
}
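/*
 * Illustrative sketch (not from the original sources): the sampling strategy used by
 * LoadMessagesThread above, extracted into a standalone helper. ReceiveMessage returns an
 * arbitrary subset of visible messages, so the editor polls several times with a visibility
 * timeout of 0 (so sampled messages stay visible to other consumers) and de-duplicates by
 * message id. The class and method names below are hypothetical.
 */
import java.util.HashMap;
import java.util.Map;

import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.model.Message;
import com.amazonaws.services.sqs.model.ReceiveMessageRequest;

class SqsSamplingSketch {
    static Map<String, Message> sampleMessages(AmazonSQS sqs, String queueUrl) {
        Map<String, Message> byId = new HashMap<String, Message>();
        for (int i = 0; i < 5; i++) {
            ReceiveMessageRequest request = new ReceiveMessageRequest(queueUrl)
                    .withVisibilityTimeout(0)      // don't hide sampled messages from other readers
                    .withMaxNumberOfMessages(10);  // the SQS maximum per call
            for (Message message : sqs.receiveMessage(request).getMessages()) {
                byId.put(message.getMessageId(), message); // repeated polls may return duplicates
            }
        }
        return byId;
    }
}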
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Portions copyright Hiroshi Ito. Licensed under Apache 2.0 license package com.gs.fw.common.mithra.attribute; import com.gs.fw.common.mithra.MithraObjectPortal; import com.gs.fw.common.mithra.extractor.ChainedAttributeValueSelector; import com.gs.fw.common.mithra.extractor.Function; import com.gs.fw.common.mithra.finder.ChainedMapper; import com.gs.fw.common.mithra.finder.MappedOperation; import com.gs.fw.common.mithra.finder.Mapper; import com.gs.fw.common.mithra.finder.Operation; import com.gs.fw.common.mithra.util.serializer.ReladomoSerializationContext; import com.gs.fw.common.mithra.util.serializer.SerialWriter; import java.io.IOException; import java.sql.Timestamp; import java.util.Date; public class MappedAsOfAttribute<T> extends AsOfAttribute<T> implements MappedAttribute { private AsOfAttribute wrappedAttribute; private Mapper mapper; private Function parentSelector; public MappedAsOfAttribute(AsOfAttribute wrappedAttribute, Mapper mapper, Function parentSelector) { super(wrappedAttribute.getAttributeName(), wrappedAttribute.getBusClassNameWithDots(), wrappedAttribute.getBusClassName(), wrappedAttribute.isNullable(), false, null, null, false, false, wrappedAttribute.getFromAttribute(), wrappedAttribute.getToAttribute(), wrappedAttribute.getInfinityDate(), wrappedAttribute.isFutureExpiringRowsExist(), wrappedAttribute.isToIsInclusive(), wrappedAttribute.getDefaultDate(), wrappedAttribute.isProcessingDate()); this.wrappedAttribute = wrappedAttribute; this.mapper = mapper; this.parentSelector = parentSelector; while (this.wrappedAttribute instanceof MappedAsOfAttribute) { MappedAsOfAttribute ma = (MappedAsOfAttribute) this.wrappedAttribute; this.mapper = new ChainedMapper(this.mapper, ma.getMapper()); this.wrappedAttribute = (AsOfAttribute) ma.getWrappedAttribute(); this.parentSelector = new ChainedAttributeValueSelector(this.parentSelector, ma.getParentSelector()); } } @Override public String getAttributeName() { if (super.getAttributeName() == null) { computeMappedAttributeName(this.wrappedAttribute, this.parentSelector); } return super.getAttributeName(); } @Override public String zGetTopOwnerClassName() { return this.mapper.getResultOwnerClassName(); } @Override public Attribute getSourceAttribute() { return this.wrappedAttribute.getSourceAttribute(); } @Override public SourceAttributeType getSourceAttributeType() { return this.wrappedAttribute.getSourceAttributeType(); } @Override public AsOfAttribute[] getAsOfAttributes() { return this.wrappedAttribute.getAsOfAttributes(); } public Function getParentSelector() { return parentSelector; } public MappedAttribute cloneForNewMapper(Mapper mapper, Function parentSelector) { return new MappedAsOfAttribute((AsOfAttribute) this.getWrappedAttribute(), mapper, parentSelector); } public Attribute getWrappedAttribute() { return wrappedAttribute; } public Mapper getMapper() { return mapper; } @Override public Operation isNull() { return new MappedOperation(this.mapper, 
this.wrappedAttribute.isNull()); } public Timestamp timestampValueOf(Object o) { if (parentSelector == null) return null; Object result = parentSelector.valueOf(o); if (result == null) return null; return this.wrappedAttribute.timestampValueOf(result); } @Override public Operation eq(Timestamp other) { return new MappedOperation(this.mapper, this.wrappedAttribute.eq(other)); } @Override public Operation eq(Date other) { return new MappedOperation(this.mapper, this.wrappedAttribute.eq(other)); } @Override public Operation equalsEdgePoint() { return new MappedOperation(this.mapper, this.wrappedAttribute.equalsEdgePoint()); } @Override public boolean isAttributeNull(Object o) { if (parentSelector == null) return true; Object result = parentSelector.valueOf(o); if (result == null) return true; return this.wrappedAttribute.isAttributeNull(result); } public Object readResolve() { return this; } public boolean equals(Object other) { if (other instanceof MappedAsOfAttribute) { MappedAsOfAttribute o = (MappedAsOfAttribute) other; return this.wrappedAttribute.equals(o.wrappedAttribute) && this.mapper.equals(o.mapper); } return false; } public int hashCode() { return this.wrappedAttribute.hashCode() ^ this.mapper.hashCode(); } @Override public MithraObjectPortal getOwnerPortal() { return this.wrappedAttribute.getOwnerPortal(); } @Override public MithraObjectPortal getTopLevelPortal() { return this.mapper.getResultPortal(); } @Override public TupleAttribute tupleWith(Attribute attr) { return MappedAttributeUtil.tupleWith(this, attr); } @Override public TupleAttribute tupleWith(Attribute... attrs) { return MappedAttributeUtil.tupleWith(this, attrs); } @Override public TupleAttribute tupleWith(TupleAttribute attr) { return MappedAttributeUtil.tupleWith(this, attr); } @Override public boolean isNullable() { return this.wrappedAttribute.isNullable(); } @Override protected void zWriteNonNullSerial(ReladomoSerializationContext context, SerialWriter writer, T reladomoObject) throws IOException { throw new RuntimeException("should not get here"); } }
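/*
 * Illustrative sketch (not from the original sources): the MappedAsOfAttribute constructor
 * above flattens nested mappings, so that a mapping of a mapping collapses into a single
 * wrapper holding one ChainedMapper and one ChainedAttributeValueSelector. The toy types
 * below are hypothetical and only demonstrate that unwrapping loop.
 */
public class ChainFlatteningSketch {

    static class Mapped {
        Object wrapped;     // either another Mapped or the terminal attribute
        String mapperChain; // stands in for the composed ChainedMapper

        Mapped(Object wrapped, String mapper) {
            this.wrapped = wrapped;
            this.mapperChain = mapper;
            // Mirror of the while-loop in MappedAsOfAttribute's constructor: keep
            // unwrapping and composing until the wrapped object is terminal.
            while (this.wrapped instanceof Mapped) {
                Mapped inner = (Mapped) this.wrapped;
                this.mapperChain = this.mapperChain + " -> " + inner.mapperChain;
                this.wrapped = inner.wrapped;
            }
        }
    }

    public static void main(String[] args) {
        Mapped inner = new Mapped("businessDate", "itemToOrder");
        Mapped outer = new Mapped(inner, "orderToCustomer");
        System.out.println(outer.mapperChain); // orderToCustomer -> itemToOrder
        System.out.println(outer.wrapped);     // businessDate (the nesting is gone)
    }
}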
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newLinkedHashMap; import static com.google.common.collect.Sets.newHashSet; import static java.lang.Integer.getInteger; import static java.lang.String.format; import static java.lang.System.currentTimeMillis; import static java.lang.Thread.currentThread; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount; import static org.apache.jackrabbit.oak.commons.PathUtils.elements; import static org.apache.jackrabbit.oak.commons.PathUtils.getName; import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath; import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; import static org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder; import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.CLEANUP; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_FORCE_COMPACT; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_RETRY; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.ESTIMATION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.IDLE; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.EXPEDITE_OPTION; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.timeout; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; import com.google.common.io.Closer; import 
org.apache.jackrabbit.oak.segment.Compactor;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.Segment;
import org.apache.jackrabbit.oak.segment.SegmentId;
import org.apache.jackrabbit.oak.segment.SegmentNodeBuilder;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundExceptionListener;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.WriterCacheManager;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.file.GCJournal.GCJournalEntry;
import org.apache.jackrabbit.oak.segment.file.ShutDown.ShutDownCloser;
import org.apache.jackrabbit.oak.segment.file.tar.CleanupContext;
import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration;
import org.apache.jackrabbit.oak.segment.file.tar.TarFiles;
import org.apache.jackrabbit.oak.segment.file.tar.TarFiles.CleanupResult;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The storage implementation for tar files.
 */
public class FileStore extends AbstractFileStore {

    private static final Logger log = LoggerFactory.getLogger(FileStore.class);

    /**
     * Minimal interval in milliseconds between subsequent garbage collection cycles.
     * Garbage collection invoked via {@link #fullGC()} will be skipped unless at least
     * the specified time has passed since its last successful invocation.
     */
    private static final long GC_BACKOFF = getInteger("oak.gc.backoff", 10*3600*1000); // 10 hours

    private static final int MB = 1024 * 1024;

    static final String LOCK_FILE_NAME = "repo.lock";

    /**
     * GC counter for logging purposes
     */
    private static final AtomicLong GC_COUNT = new AtomicLong(0);

    @Nonnull
    private final SegmentWriter segmentWriter;

    @Nonnull
    private final GarbageCollector garbageCollector;

    private final TarFiles tarFiles;

    private final RandomAccessFile lockFile;

    private final FileLock lock;

    private TarRevisions revisions;

    /**
     * Scheduler for running <em>short</em> background operations
     */
    private final Scheduler fileStoreScheduler = new Scheduler("FileStore background tasks");

    /**
     * List of old tar file generations that are waiting to be removed. They cannot
     * be removed immediately, because they first need to be closed, and the
     * JVM needs to release the memory mapped file references.
     */
    private final FileReaper fileReaper = new FileReaper();

    /**
     * This flag is periodically updated by the disk space check and indicates whether
     * the available disk space satisfies the thresholds in {@code SegmentGCOptions}.
     */
    private final AtomicBoolean sufficientDiskSpace = new AtomicBoolean(true);

    /**
     * This flag is raised whenever the available memory falls under a specified
     * threshold.
     * See {@link GCMemoryBarrier}
     */
    private final AtomicBoolean sufficientMemory = new AtomicBoolean(true);

    private final FileStoreStats stats;

    private final ShutDown shutDown = new ShutDown();

    @Nonnull
    private final SegmentNotFoundExceptionListener snfeListener;

    FileStore(final FileStoreBuilder builder) throws InvalidFileStoreVersionException, IOException {
        super(builder);
        lockFile = new RandomAccessFile(new File(directory, LOCK_FILE_NAME), "rw");
        try {
            lock = lockFile.getChannel().lock();
        } catch (OverlappingFileLockException ex) {
            throw new IllegalStateException(directory.getAbsolutePath() + " is in use by another store.", ex);
        }
        this.segmentWriter = defaultSegmentWriterBuilder("sys")
                .withGeneration(() -> getGcGeneration().nonGC())
                .withWriterPool()
                .with(builder.getCacheManager()
                        .withAccessTracking("WRITE", builder.getStatsProvider()))
                .build(this);
        this.garbageCollector = new GarbageCollector(
                builder.getGcOptions(),
                builder.getGcListener(),
                new GCJournal(directory),
                builder.getCacheManager(),
                builder.getStatsProvider());
        newManifestChecker(directory, builder.getStrictVersionCheck()).checkAndUpdateManifest();
        this.stats = new FileStoreStats(builder.getStatsProvider(), this, 0);
        this.tarFiles = TarFiles.builder()
                .withDirectory(directory)
                .withMemoryMapping(memoryMapping)
                .withTarRecovery(recovery)
                .withIOMonitor(ioMonitor)
                .withFileStoreMonitor(stats)
                .withMaxFileSize(builder.getMaxFileSize() * MB)
                .build();
        this.stats.init(this.tarFiles.size());
        this.snfeListener = builder.getSnfeListener();
        fileStoreScheduler.scheduleAtFixedRate(
                format("TarMK flush [%s]", directory), 5, SECONDS,
                new Runnable() {
                    @Override
                    public void run() {
                        if (shutDown.shutDownRequested()) {
                            return;
                        }
                        try {
                            flush();
                        } catch (IOException e) {
                            log.warn("Failed to flush the TarMK at {}", directory, e);
                        }
                    }
                });
        fileStoreScheduler.scheduleAtFixedRate(
                format("TarMK file reaper [%s]", directory), 5, SECONDS,
                new Runnable() {
                    @Override
                    public void run() {
                        fileReaper.reap();
                    }
                });
        fileStoreScheduler.scheduleAtFixedRate(
                format("TarMK disk space check [%s]", directory), 1, MINUTES,
                new Runnable() {
                    final SegmentGCOptions gcOptions = builder.getGcOptions();

                    @Override
                    public void run() {
                        checkDiskSpace(gcOptions);
                    }
                });
        log.info("TarMK opened: {} (mmap={})", directory, memoryMapping);
        log.debug("TAR files: {}", tarFiles);
    }

    FileStore bind(TarRevisions revisions) throws IOException {
        try (ShutDownCloser ignored = shutDown.keepAlive()) {
            this.revisions = revisions;
            this.revisions.bind(this, tracker, initialNode());
            return this;
        }
    }

    @Nonnull
    private Supplier<RecordId> initialNode() {
        return new Supplier<RecordId>() {
            @Override
            public RecordId get() {
                try {
                    SegmentWriter writer = defaultSegmentWriterBuilder("init").build(FileStore.this);
                    NodeBuilder builder = EMPTY_NODE.builder();
                    builder.setChildNode("root", EMPTY_NODE);
                    SegmentNodeState node = new SegmentNodeState(segmentReader, writer, getBlobStore(), writer.writeNode(builder.getNodeState()));
                    writer.flush();
                    return node.getRecordId();
                } catch (IOException e) {
                    String msg = "Failed to write initial node";
                    log.error(msg, e);
                    throw new IllegalStateException(msg, e);
                }
            }
        };
    }

    @Nonnull
    private GCGeneration getGcGeneration() {
        return revisions.getHead().getSegmentId().getGcGeneration();
    }

    /**
     * @return a runnable for running garbage collection
     */
    public Runnable getGCRunner() {
        return new SafeRunnable(format("TarMK revision gc [%s]", directory), () -> {
            try (ShutDownCloser ignored = shutDown.keepAlive()) {
                garbageCollector.run();
            } catch (IOException e) {
                log.error("Error running revision garbage collection", e);
            }
        });
    }
revision garbage collection", e); } }); } /** * @return the currently active gc write monitor */ public GCNodeWriteMonitor getGCNodeWriteMonitor() { return garbageCollector.getGCNodeWriteMonitor(); } /** * @return the size of this store. */ private long size() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return tarFiles.size(); } } public int readerCount() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return tarFiles.readerCount(); } } public FileStoreStats getStats() { return stats; } private void doFlush() throws IOException { if (revisions == null) { return; } revisions.flush(() -> { segmentWriter.flush(); tarFiles.flush(); stats.flushed(); return null; }); } public void flush() throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { doFlush(); } } /** * Run full garbage collection: estimation, compaction, cleanup. */ public void fullGC() throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { garbageCollector.runFull(); } } /** * Run tail garbage collection. */ public void tailGC() throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { garbageCollector.runTail(); } } /** * Run the compaction gain estimation process. * @return */ public GCEstimation estimateCompactionGain() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return garbageCollector.estimateCompactionGain(); } } /** * Copy every referenced record in data (non-bulk) segments. Bulk segments * are fully kept (they are only removed in cleanup, if there is no * reference to them). * @return {@code true} on success, {@code false} otherwise. */ public boolean compactFull() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return garbageCollector.compactFull().isSuccess(); } } public boolean compactTail() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return garbageCollector.compactTail().isSuccess(); } } /** * Run garbage collection on the segment level: reclaim those data segments * that are from an old segment generation and those bulk segments that are not * reachable anymore. * Those tar files that shrink by at least 25% are rewritten to a new tar generation * skipping the reclaimed segments. */ public void cleanup() throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { fileReaper.add(garbageCollector.cleanup(CompactionResult.skipped( getGcGeneration(), garbageCollector.gcOptions, revisions.getHead() ))); } } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. * <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ public void collectBlobReferences(Consumer<String> collector) throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { garbageCollector.collectBlobReferences(collector); } } /** * Cancel a running revision garbage collection compaction process as soon as possible. * Does nothing if gc is not running. 
*/ public void cancelGC() { garbageCollector.cancel(); } @Override @Nonnull public SegmentWriter getWriter() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return segmentWriter; } } @Override @Nonnull public TarRevisions getRevisions() { try (ShutDownCloser ignored = shutDown.keepAlive()) { return revisions; } } @Override public void close() { try (ShutDownCloser ignored = shutDown.shutDown()) { // avoid deadlocks by closing (and joining) the background // thread before acquiring the synchronization lock fileStoreScheduler.close(); try { doFlush(); } catch (IOException e) { log.warn("Unable to flush the store", e); } Closer closer = Closer.create(); closer.register(revisions); if (lock != null) { try { lock.release(); } catch (IOException e) { log.warn("Unable to release the file lock", e); } } closer.register(lockFile); closer.register(tarFiles); closeAndLogOnFail(closer); } // Try removing pending files in case the scheduler didn't have a chance to run yet System.gc(); // for any memory-mappings that are no longer used fileReaper.reap(); log.info("TarMK closed: {}", directory); } @Override public boolean containsSegment(SegmentId id) { try (ShutDownCloser ignored = shutDown.keepAlive()) { return tarFiles.containsSegment(id.getMostSignificantBits(), id.getLeastSignificantBits()); } } @Override @Nonnull public Segment readSegment(final SegmentId id) { try (ShutDownCloser ignored = shutDown.keepAlive()) { return segmentCache.getSegment(id, () -> readSegmentUncached(tarFiles, id)); } catch (ExecutionException e) { SegmentNotFoundException snfe = asSegmentNotFoundException(e, id); snfeListener.notify(id, snfe); throw snfe; } } @Override public void writeSegment(SegmentId id, byte[] buffer, int offset, int length) throws IOException { try (ShutDownCloser ignored = shutDown.keepAlive()) { Segment segment = null; // If the segment is a data segment, create a new instance of Segment to // access some internal information stored in the segment and to store // in an in-memory cache for later use. GCGeneration generation = GCGeneration.NULL; Set<UUID> references = null; Set<String> binaryReferences = null; if (id.isDataSegmentId()) { ByteBuffer data; if (offset > 4096) { data = ByteBuffer.allocate(length); data.put(buffer, offset, length); data.rewind(); } else { data = ByteBuffer.wrap(buffer, offset, length); } segment = new Segment(tracker, segmentReader, id, data); generation = segment.getGcGeneration(); references = readReferences(segment); binaryReferences = readBinaryReferences(segment); } tarFiles.writeSegment( id.asUUID(), buffer, offset, length, generation, references, binaryReferences ); // Keep this data segment in memory as it's likely to be accessed soon. if (segment != null) { segmentCache.putSegment(segment); } } } private void checkDiskSpace(SegmentGCOptions gcOptions) { long repositoryDiskSpace = size(); long availableDiskSpace = directory.getFreeSpace(); boolean updated = SegmentGCOptions.isDiskSpaceSufficient(repositoryDiskSpace, availableDiskSpace); boolean previous = sufficientDiskSpace.getAndSet(updated); if (previous && !updated) { log.warn("Available disk space ({}) is too low, current repository size is approx. {}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } if (updated && !previous) { log.info("Available disk space ({}) is sufficient again for repository operations, current repository size is approx. 
{}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } } private class GarbageCollector { @Nonnull private final SegmentGCOptions gcOptions; /** * {@code GcListener} listening to this instance's gc progress */ @Nonnull private final GCListener gcListener; @Nonnull private final GCJournal gcJournal; @Nonnull private final WriterCacheManager cacheManager; @Nonnull private final StatisticsProvider statisticsProvider; @Nonnull private GCNodeWriteMonitor compactionMonitor = GCNodeWriteMonitor.EMPTY; private volatile boolean cancelled; /** * Timestamp of the last time {@link #fullGC()} or {@link #tailGC()} was * successfully invoked. 0 if never. */ private long lastSuccessfullGC; GarbageCollector( @Nonnull SegmentGCOptions gcOptions, @Nonnull GCListener gcListener, @Nonnull GCJournal gcJournal, @Nonnull WriterCacheManager cacheManager, @Nonnull StatisticsProvider statisticsProvider) { this.gcOptions = gcOptions; this.gcListener = gcListener; this.gcJournal = gcJournal; this.cacheManager = cacheManager; this.statisticsProvider = statisticsProvider; } GCNodeWriteMonitor getGCNodeWriteMonitor() { return compactionMonitor; } synchronized void run() throws IOException { switch (gcOptions.getGCType()) { case FULL: runFull(); break; case TAIL: runTail(); break; default: throw new IllegalStateException("Invalid GC type"); } } synchronized void runFull() throws IOException { run(this::compactFull); } synchronized void runTail() throws IOException { run(this::compactTail); } private void run(Supplier<CompactionResult> compact) throws IOException { try { gcListener.info("TarMK GC #{}: started", GC_COUNT.incrementAndGet()); long dt = System.currentTimeMillis() - lastSuccessfullGC; if (dt < GC_BACKOFF) { gcListener.skipped("TarMK GC #{}: skipping garbage collection as it already ran " + "less than {} hours ago ({} s).", GC_COUNT, GC_BACKOFF/3600000, dt/1000); return; } boolean sufficientEstimatedGain = true; if (gcOptions.isEstimationDisabled()) { gcListener.info("TarMK GC #{}: estimation skipped because it was explicitly disabled", GC_COUNT); } else if (gcOptions.isPaused()) { gcListener.info("TarMK GC #{}: estimation skipped because compaction is paused", GC_COUNT); } else { gcListener.info("TarMK GC #{}: estimation started", GC_COUNT); gcListener.updateStatus(ESTIMATION.message()); Stopwatch watch = Stopwatch.createStarted(); GCEstimation estimate = estimateCompactionGain(); sufficientEstimatedGain = estimate.gcNeeded(); String gcLog = estimate.gcLog(); if (sufficientEstimatedGain) { gcListener.info( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } else { gcListener.skipped( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } } if (sufficientEstimatedGain) { try (GCMemoryBarrier gcMemoryBarrier = new GCMemoryBarrier( sufficientMemory, gcListener, GC_COUNT.get(), gcOptions)) { if (gcOptions.isPaused()) { gcListener.skipped("TarMK GC #{}: compaction paused", GC_COUNT); } else if (!sufficientMemory.get()) { gcListener.skipped("TarMK GC #{}: compaction skipped. 
Not enough memory", GC_COUNT); } else { CompactionResult compactionResult = compact.get(); if (compactionResult.isSuccess()) { lastSuccessfullGC = System.currentTimeMillis(); } else { gcListener.info("TarMK GC #{}: cleaning up after failed compaction", GC_COUNT); } fileReaper.add(cleanup(compactionResult)); } } } } finally { compactionMonitor.finished(); gcListener.updateStatus(IDLE.message()); } } /** * Estimated compaction gain. The result will be undefined if stopped through * the passed {@code stop} signal. * @return compaction gain estimate */ synchronized GCEstimation estimateCompactionGain() { return new SizeDeltaGcEstimation(gcOptions, gcJournal, stats.getApproximateSize()); } @Nonnull private CompactionResult compactionAborted(@Nonnull GCGeneration generation) { gcListener.compactionFailed(generation); return CompactionResult.aborted(getGcGeneration(), generation); } @Nonnull private CompactionResult compactionSucceeded(@Nonnull GCGeneration generation, @Nonnull RecordId compactedRootId) { gcListener.compactionSucceeded(generation); return CompactionResult.succeeded(generation, gcOptions, compactedRootId); } @CheckForNull private SegmentNodeState getBase() { String root = gcJournal.read().getRoot(); RecordId rootId = RecordId.fromString(tracker, root); if (RecordId.NULL.equals(rootId)) { return null; } try { SegmentNodeState node = segmentReader.readNode(rootId); node.getPropertyCount(); // Resilience: fail early with a SNFE if the segment is not there return node; } catch (SegmentNotFoundException snfe) { gcListener.error("TarMK GC #" + GC_COUNT + ": Base state " + rootId + " is not accessible", snfe); return null; } } synchronized CompactionResult compactFull() { gcListener.info("TarMK GC #{}: running full compaction", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } synchronized CompactionResult compactTail() { gcListener.info("TarMK GC #{}: running tail compaction", GC_COUNT); SegmentNodeState base = getBase(); if (base != null) { return compact(base, getGcGeneration().nextTail()); } gcListener.info("TarMK GC #{}: no base state available, running full compaction instead", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } private CompactionResult compact(SegmentNodeState base, GCGeneration newGeneration) { try { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: compaction started, gc options={}", GC_COUNT, gcOptions); gcListener.updateStatus(COMPACTION.message()); GCJournalEntry gcEntry = gcJournal.read(); long initialSize = size(); compactionMonitor = new GCNodeWriteMonitor(gcOptions.getGcLogInterval(), gcListener); compactionMonitor.init(GC_COUNT.get(), gcEntry.getRepoSize(), gcEntry.getNodes(), initialSize); SegmentNodeState before = getHead(); CancelCompactionSupplier cancel = new CancelCompactionSupplier(FileStore.this); SegmentWriter writer = defaultSegmentWriterBuilder("c") .with(cacheManager .withAccessTracking("COMPACT", statisticsProvider)) .withGeneration(newGeneration) .withoutWriterPool() .build(FileStore.this); Compactor compactor = new Compactor( segmentReader, writer, getBlobStore(), cancel, compactionMonitor); SegmentNodeState after = compact(base, before, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle 0 completed in {} ({} ms). 
Compacted {} to {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), before.getRecordId(), after.getRecordId()); int cycles = 0; boolean success = false; while (cycles < gcOptions.getRetryCount() && !(success = revisions.setHead(before.getRecordId(), after.getRecordId(), EXPEDITE_OPTION))) { // Some other concurrent changes have been made. // Rebase (and compact) those changes on top of the // compacted state before retrying to set the head. cycles++; gcListener.info("TarMK GC #{}: compaction detected concurrent commits while compacting. " + "Compacting these commits. Cycle {} of {}", GC_COUNT, cycles, gcOptions.getRetryCount()); gcListener.updateStatus(COMPACTION_RETRY.message() + cycles); Stopwatch cycleWatch = Stopwatch.createStarted(); SegmentNodeState head = getHead(); after = compact(after, head, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle {} completed in {} ({} ms). Compacted {} against {} to {}", GC_COUNT, cycles, cycleWatch, cycleWatch.elapsed(MILLISECONDS), head.getRecordId(), before.getRecordId(), after.getRecordId()); before = head; } if (!success) { gcListener.info("TarMK GC #{}: compaction gave up compacting concurrent commits after {} cycles.", GC_COUNT, cycles); int forceTimeout = gcOptions.getForceTimeout(); if (forceTimeout > 0) { gcListener.info("TarMK GC #{}: trying to force compact remaining commits for {} seconds. " + "Concurrent commits to the store will be blocked.", GC_COUNT, forceTimeout); gcListener.updateStatus(COMPACTION_FORCE_COMPACT.message()); Stopwatch forceWatch = Stopwatch.createStarted(); cycles++; cancel.timeOutAfter(forceTimeout, SECONDS); after = forceCompact(after, compactor, writer); success = after != null; if (success) { gcListener.info("TarMK GC #{}: compaction succeeded to force compact remaining commits " + "after {} ({} ms).", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } else { if (cancel.get()) { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits " + "after {} ({} ms). Compaction was cancelled: {}.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS), cancel); } else { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits. " + "after {} ({} ms). Most likely compaction didn't get exclusive access to the store.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } } } } if (success) { writer.flush(); gcListener.info("TarMK GC #{}: compaction succeeded in {} ({} ms), after {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionSucceeded(newGeneration, after.getRecordId()); } else { gcListener.info("TarMK GC #{}: compaction failed after {} ({} ms), and {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionAborted(newGeneration); } } catch (InterruptedException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction interrupted", e); currentThread().interrupt(); return compactionAborted(newGeneration); } catch (IOException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction encountered an error", e); return compactionAborted(newGeneration); } } /** * Compact {@code uncompacted} on top of an optional {@code base}. * @param base the base state to compact onto or {@code null} for an empty state. 
* @param uncompacted the uncompacted state to compact * @param compactor the compactor for creating the new generation of the * uncompacted state. * @param writer the segment writer used by {@code compactor} for writing to the * new generation. * @return compacted clone of {@code uncompacted} or null if cancelled. * @throws IOException */ @CheckForNull private SegmentNodeState compact( @Nullable SegmentNodeState base, @Nonnull SegmentNodeState uncompacted, @Nonnull Compactor compactor, @Nonnull SegmentWriter writer) throws IOException { // Collect a chronologically ordered list of roots for the base and the uncompacted // state. This list consists of all checkpoints followed by the root. LinkedHashMap<String, NodeState> baseRoots = collectRoots(base); LinkedHashMap<String, NodeState> uncompactedRoots = collectRoots(uncompacted); // Compact the list of uncompacted roots to a list of compacted roots. LinkedHashMap<String, NodeState> compactedRoots = compact(baseRoots, uncompactedRoots, compactor); if (compactedRoots == null) { return null; } // Build a compacted super root by replacing the uncompacted roots with // the compacted ones in the original node. SegmentNodeBuilder builder = uncompacted.builder(); for (Entry<String, NodeState> compactedRoot : compactedRoots.entrySet()) { String path = compactedRoot.getKey(); NodeState state = compactedRoot.getValue(); NodeBuilder childBuilder = getChild(builder, getParentPath(path)); childBuilder.setChildNode(getName(path), state); } // Use the segment writer of the *new generation* to persist the compacted super root. RecordId nodeId = writer.writeNode(builder.getNodeState(), uncompacted.getStableIdBytes()); return new SegmentNodeState(segmentReader, segmentWriter, getBlobStore(), nodeId); } /** * Compact a list of uncompacted roots on top of base roots of the same key or * an empty node if none. */ @CheckForNull private LinkedHashMap<String, NodeState> compact( @Nonnull LinkedHashMap<String, NodeState> baseRoots, @Nonnull LinkedHashMap<String, NodeState> uncompactedRoots, @Nonnull Compactor compactor) throws IOException { NodeState onto = baseRoots.get("root"); NodeState previous = onto; LinkedHashMap<String, NodeState> compactedRoots = newLinkedHashMap(); for (Entry<String, NodeState> uncompactedRoot : uncompactedRoots.entrySet()) { String path = uncompactedRoot.getKey(); NodeState state = uncompactedRoot.getValue(); NodeState compacted; if (onto == null) { compacted = compactor.compact(state); } else { compacted = compactor.compact(previous, state, onto); } if (compacted == null) { return null; } previous = state; onto = compacted; compactedRoots.put(path, compacted); } return compactedRoots; } /** * Collect a chronologically ordered list of roots for the base and the uncompacted * state from a {@code superRoot} . This list consists of all checkpoints followed by * the root. 
         */
        @Nonnull
        private LinkedHashMap<String, NodeState> collectRoots(@Nullable SegmentNodeState superRoot) {
            LinkedHashMap<String, NodeState> roots = newLinkedHashMap();
            if (superRoot != null) {
                List<ChildNodeEntry> checkpoints = newArrayList(
                        superRoot.getChildNode("checkpoints").getChildNodeEntries());

                checkpoints.sort((cne1, cne2) -> {
                    long c1 = cne1.getNodeState().getLong("created");
                    long c2 = cne2.getNodeState().getLong("created");
                    return Long.compare(c1, c2);
                });

                for (ChildNodeEntry checkpoint : checkpoints) {
                    roots.put("checkpoints/" + checkpoint.getName() + "/root",
                            checkpoint.getNodeState().getChildNode("root"));
                }
                roots.put("root", superRoot.getChildNode("root"));
            }
            return roots;
        }

        @Nonnull
        private NodeBuilder getChild(NodeBuilder builder, String path) {
            for (String name : elements(path)) {
                builder = builder.getChildNode(name);
            }
            return builder;
        }

        private SegmentNodeState forceCompact(
                @Nonnull final SegmentNodeState base,
                @Nonnull final Compactor compactor,
                @Nonnull SegmentWriter writer)
        throws InterruptedException {
            RecordId compactedId = revisions.setHead(new Function<RecordId, RecordId>() {
                @Nullable
                @Override
                public RecordId apply(RecordId headId) {
                    try {
                        long t0 = currentTimeMillis();
                        SegmentNodeState after = compact(
                                base, segmentReader.readNode(headId), compactor, writer);
                        if (after == null) {
                            gcListener.info("TarMK GC #{}: compaction cancelled after {} seconds",
                                    GC_COUNT, (currentTimeMillis() - t0) / 1000);
                            return null;
                        } else {
                            return after.getRecordId();
                        }
                    } catch (IOException e) {
                        gcListener.error("TarMK GC #" + GC_COUNT + ": Error during forced compaction.", e);
                        return null;
                    }
                }
            }, timeout(gcOptions.getForceTimeout(), SECONDS));
            return compactedId != null ? segmentReader.readNode(compactedId) : null;
        }

        private CleanupContext newCleanupContext(Predicate<GCGeneration> old) {
            return new CleanupContext() {

                private boolean isUnreferencedBulkSegment(UUID id, boolean referenced) {
                    return !isDataSegmentId(id.getLeastSignificantBits()) && !referenced;
                }

                private boolean isOldDataSegment(UUID id, GCGeneration generation) {
                    return isDataSegmentId(id.getLeastSignificantBits()) && old.apply(generation);
                }

                @Override
                public Collection<UUID> initialReferences() {
                    Set<UUID> references = newHashSet();
                    for (SegmentId id : tracker.getReferencedSegmentIds()) {
                        if (id.isBulkSegmentId()) {
                            references.add(id.asUUID());
                        }
                    }
                    return references;
                }

                @Override
                public boolean shouldReclaim(UUID id, GCGeneration generation, boolean referenced) {
                    return isUnreferencedBulkSegment(id, referenced) || isOldDataSegment(id, generation);
                }

                @Override
                public boolean shouldFollow(UUID from, UUID to) {
                    return !isDataSegmentId(to.getLeastSignificantBits());
                }
            };
        }

        /**
         * Cleanup segments whose generation matches the {@link CompactionResult#reclaimer()} predicate.
* @return list of files to be removed * @throws IOException */ @Nonnull private List<File> cleanup(@Nonnull CompactionResult compactionResult) throws IOException { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: cleanup started.", GC_COUNT); gcListener.updateStatus(CLEANUP.message()); segmentCache.clear(); // Suggest to the JVM that now would be a good time // to clear stale weak references in the SegmentTracker System.gc(); CleanupResult cleanupResult = tarFiles.cleanup(newCleanupContext(compactionResult.reclaimer())); if (cleanupResult.isInterrupted()) { gcListener.info("TarMK GC #{}: cleanup interrupted", GC_COUNT); } tracker.clearSegmentIdTables(cleanupResult.getReclaimedSegmentIds(), compactionResult.gcInfo()); gcListener.info("TarMK GC #{}: cleanup marking files for deletion: {}", GC_COUNT, toFileNames(cleanupResult.getRemovableFiles())); long finalSize = size(); long reclaimedSize = cleanupResult.getReclaimedSize(); stats.reclaimed(reclaimedSize); gcJournal.persist(reclaimedSize, finalSize, getGcGeneration(), compactionMonitor.getCompactedNodes(), compactionResult.getCompactedRootId().toString10()); gcListener.cleaned(reclaimedSize, finalSize); gcListener.info("TarMK GC #{}: cleanup completed in {} ({} ms). Post cleanup size is {} ({} bytes)" + " and space reclaimed {} ({} bytes).", GC_COUNT, watch, watch.elapsed(MILLISECONDS), humanReadableByteCount(finalSize), finalSize, humanReadableByteCount(reclaimedSize), reclaimedSize); return cleanupResult.getRemovableFiles(); } private String toFileNames(@Nonnull List<File> files) { if (files.isEmpty()) { return "none"; } else { return Joiner.on(",").join(files); } } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. * <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ synchronized void collectBlobReferences(Consumer<String> collector) throws IOException { segmentWriter.flush(); tarFiles.collectBlobReferences(collector, Reclaimers.newOldReclaimer(getGcGeneration(), gcOptions.getRetainedGenerations())); } void cancel() { cancelled = true; } /** * Represents the cancellation policy for the compaction phase. If the disk * space was considered insufficient at least once during compaction (or if * the space was never sufficient to begin with), compaction is considered * canceled. Furthermore when the file store is shutting down, compaction is * considered canceled. * Finally the cancellation can be triggered by a timeout that can be set * at any time. */ private class CancelCompactionSupplier implements Supplier<Boolean> { private final FileStore store; private String reason; private volatile long deadline; public CancelCompactionSupplier(@Nonnull FileStore store) { cancelled = false; this.store = store; } /** * Set a timeout for cancellation. Setting a different timeout cancels * a previous one that did not yet elapse. Setting a timeout after * cancellation took place has no effect. 
             */
            public void timeOutAfter(final long duration, @Nonnull final TimeUnit unit) {
                timeoutSeconds = SECONDS.convert(duration, unit);
                deadline = currentTimeMillis() + MILLISECONDS.convert(duration, unit);
            }

            // Remember the configured timeout so the cancellation reason can report
            // the duration rather than the absolute deadline.
            private volatile long timeoutSeconds;

            @Override
            public Boolean get() {
                // The outOfDiskSpace and shutdown flags can only transition from
                // false (their initial values), to true. Once true, there should
                // be no way to go back.
                if (!store.sufficientDiskSpace.get()) {
                    reason = "Not enough disk space";
                    return true;
                }
                if (!store.sufficientMemory.get()) {
                    reason = "Not enough memory";
                    return true;
                }
                if (store.shutDown.shutDownRequested()) {
                    reason = "The FileStore is shutting down";
                    return true;
                }
                if (cancelled) {
                    reason = "Cancelled by user";
                    return true;
                }
                if (deadline > 0 && currentTimeMillis() > deadline) {
                    reason = "Timeout after " + timeoutSeconds + " seconds";
                    return true;
                }
                return false;
            }

            @Override
            public String toString() {
                return reason;
            }
        }
    }

    /**
     * Instances of this class represent the result from a compaction. Either
     * {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded},
     * {@link #aborted(GCGeneration, GCGeneration) aborted} or
     * {@link #skipped(GCGeneration, SegmentGCOptions, RecordId) skipped}.
     */
    private abstract static class CompactionResult {

        @Nonnull
        private final GCGeneration currentGeneration;

        protected CompactionResult(@Nonnull GCGeneration currentGeneration) {
            this.currentGeneration = currentGeneration;
        }

        /**
         * Result of a succeeded compaction.
         * @param newGeneration the generation successfully created by compaction
         * @param gcOptions the current GC options used by compaction
         * @param compactedRootId the record id of the root created by compaction
         */
        static CompactionResult succeeded(
                @Nonnull GCGeneration newGeneration,
                @Nonnull final SegmentGCOptions gcOptions,
                @Nonnull final RecordId compactedRootId) {
            return new CompactionResult(newGeneration) {
                @Override
                Predicate<GCGeneration> reclaimer() {
                    return Reclaimers.newOldReclaimer(newGeneration, gcOptions.getRetainedGenerations());
                }

                @Override
                boolean isSuccess() {
                    return true;
                }

                @Override
                RecordId getCompactedRootId() {
                    return compactedRootId;
                }
            };
        }

        /**
         * Result of an aborted compaction.
         * @param currentGeneration the current generation of the store
         * @param failedGeneration the generation that compaction attempted to create
         */
        static CompactionResult aborted(
                @Nonnull GCGeneration currentGeneration,
                @Nonnull final GCGeneration failedGeneration) {
            return new CompactionResult(currentGeneration) {
                @Override
                Predicate<GCGeneration> reclaimer() {
                    return Reclaimers.newExactReclaimer(failedGeneration);
                }

                @Override
                boolean isSuccess() {
                    return false;
                }
            };
        }

        /**
         * Result serving as a placeholder for a compaction that was skipped.
         * @param currentGeneration the current generation of the store
         * @param gcOptions the current GC options used by compaction
         * @param compactedRootId the record id of the current root
         */
        static CompactionResult skipped(
                @Nonnull GCGeneration currentGeneration,
                @Nonnull final SegmentGCOptions gcOptions,
                @Nonnull final RecordId compactedRootId) {
            return new CompactionResult(currentGeneration) {
                @Override
                Predicate<GCGeneration> reclaimer() {
                    return Reclaimers.newOldReclaimer(currentGeneration, gcOptions.getRetainedGenerations());
                }

                @Override
                boolean isSuccess() {
                    return true;
                }

                @Override
                RecordId getCompactedRootId() {
                    return compactedRootId;
                }
            };
        }

        /**
         * @return a predicate determining which segments to
         *         {@link GarbageCollector#cleanup(CompactionResult) clean up} for
         *         the given compaction result.
*/ abstract Predicate<GCGeneration> reclaimer(); /** * @return {@code true} for {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded} * and {@link #skipped(GCGeneration, SegmentGCOptions, RecordId) skipped}, {@code false} otherwise. */ abstract boolean isSuccess(); /** * @return the record id of the compacted root on {@link #isSuccess() success}, * {@link RecordId#NULL} otherwise. */ RecordId getCompactedRootId() { return RecordId.NULL; } /** * @return a diagnostic message describing the outcome of this compaction. */ String gcInfo() { return "gc-count=" + GC_COUNT + ",gc-status=" + (isSuccess() ? "success" : "failed") + ",store-generation=" + currentGeneration + ",reclaim-predicate=" + reclaimer(); } } }
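/*
 * Illustrative sketch (not part of Oak, names hypothetical): the reclaimer()
 * predicates above decide which GC generations may be deleted after a
 * compaction. A standalone analogue of the two policies, simplified to plain
 * int generations; Oak's real Reclaimers operates on GCGeneration objects and
 * also accounts for tail compactions, so this is a sketch of the idea only.
 */
import java.util.function.IntPredicate;

class ReclaimerSketch {

    /**
     * Marks a generation reclaimable when it is older than
     * {@code newGeneration} by at least {@code retainedGenerations},
     * mirroring the policy used on a successful compaction.
     */
    static IntPredicate newOldReclaimer(int newGeneration, int retainedGenerations) {
        return generation -> generation <= newGeneration - retainedGenerations;
    }

    /**
     * An "exact" reclaimer, as used after an aborted compaction: reclaim
     * only the partially written generation, nothing else.
     */
    static IntPredicate newExactReclaimer(int failedGeneration) {
        return generation -> generation == failedGeneration;
    }

    public static void main(String[] args) {
        IntPredicate old = newOldReclaimer(7, 2);
        System.out.println(old.test(5)); // true: 5 <= 7 - 2, outside the retained window
        System.out.println(old.test(6)); // false: still retained
        System.out.println(newExactReclaimer(8).test(8)); // true: the failed generation
    }
}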
package com.winga.xxl.classifier.multi;

import java.io.File;
import java.io.IOException;

import org.wltea.analyzer.lucene.IKAnalyzer;

import com.winga.xxl.classifier.data.store.Documents;
import com.winga.xxl.classifier.data.store.DocumentsReader;
import com.winga.xxl.classifier.data.store.IDocuments;
import com.winga.xxl.classifier.data.store.VectorProblem;
import com.winga.xxl.classifier.model.IModel;
import com.winga.xxl.classifier.model.parser.KMeansXmlModelParser;
import com.winga.xxl.classifier.model.parser.NBXmlModelParser;
import com.winga.xxl.classifier.model.parser.SvmXmlModelParser;
import com.winga.xxl.classifier.model.parser.XmlModelParser;
import com.winga.xxl.classifier.parameter.KMeansTrainParameter;
import com.winga.xxl.classifier.parameter.NBTrainParameter;
import com.winga.xxl.classifier.parameter.PredictParameter;
import com.winga.xxl.classifier.parameter.SvmTrainParameter;
import com.winga.xxl.classifier.parameter.TrainParameter;
import com.winga.xxl.classifier.util.IClassifier;
import com.winga.xxl.classifier.util.KmeansCluster;
import com.winga.xxl.classifier.util.NbClassifier;
import com.winga.xxl.classifier.util.SvmClassifier;

public class CategoryPredict {

    public static double arrayPercentPredict(IDocuments[] document, IModel model,
            PredictParameter predictParameter, IClassifier classifier)
            throws IOException {
        double match = 0;
        for (int i = 0; i < document.length; i++) {
            String result = classifier.predict(document[i], model,
                    predictParameter);
            if (result.equals(document[i].getCategory())) {
                ++match;
            }
        }
        double accuracy = match / document.length;
        System.out.println("Total documents number is : " + document.length
                + " , and right prediction is : " + match + " .");
        return accuracy;
    }

    public static String multiDecide(String[] subDecisionArray) {
        int length = subDecisionArray.length;
        // TODO: generalize the majority vote to any odd number of
        // sub-classifiers; only three are supported for now (see the
        // sketch after this class).
        if (length != 3) {
            System.out.println("The number of sub-classifiers is not 3!");
            return null;
        }
        // Assume the number of sub-classifiers is 3.
        if (subDecisionArray[0].equals(subDecisionArray[1])) {
            return subDecisionArray[0];
        } else if (subDecisionArray[0].equals(subDecisionArray[2])) {
            return subDecisionArray[0];
        } else if (subDecisionArray[1].equals(subDecisionArray[2])) {
            return subDecisionArray[1];
        } else {
            System.out.println("The sub-classifiers' decisions are all different!");
            return "random";
        }
    }

    public static String multiDecisionPercent(String sampleFilePath,
            String pendingDocsPath) throws IOException {
        IDocuments[] testDocs = DocumentsReader
                .getFromXMLFileDirectory(pendingDocsPath);
        IDocuments[] sampleDocs = DocumentsReader
                .getFromXMLFileDirectory(sampleFilePath);
        IDocuments[] sampleCenterDocs = DocumentsReader
                .getSampleCenterXML(sampleFilePath);

        // Train the SVM model.
        String argument = "-c " + "16.0" + " -g " + "0.0078125" + " zcxc";
        TrainParameter svmParameter = new SvmTrainParameter(argument);
        IModel svmModel = new SvmClassifier().train(sampleDocs, svmParameter);

        // Train the NB model.
        int featureNum = 500;
        TrainParameter nbParameter = new NBTrainParameter(featureNum);
        IModel nbModel = new NbClassifier()
                .train(sampleCenterDocs, nbParameter);

        // Train the KMeans model.
        int cateNum = 7;
        TrainParameter kmeansParameter = new KMeansTrainParameter(cateNum,
                null, sampleFilePath);
        IModel kmeansModel = new KmeansCluster().train(sampleDocs,
                kmeansParameter);

        // Build the prediction parameter.
        String predictArgv = "-b 0 sdf sdf sdf";
        PredictParameter predictParameter = new PredictParameter(predictArgv);

        // Statistics of the multi-prediction detail.
        double match = 0;
        for (int i = 0; i < testDocs.length; i++) {
            String nbPredict = new NbClassifier().predict(testDocs[i], nbModel,
                    predictParameter);
            String svmPredict = new SvmClassifier().predict(testDocs[i],
                    svmModel, predictParameter);
            String kmeansPredict = new KmeansCluster().predict(testDocs[i],
                    kmeansModel, predictParameter);
            String[] predictArray = { nbPredict, svmPredict, kmeansPredict };
            String predictDecision = multiDecide(predictArray);
            System.out.println("The " + i + "-th doc's nbpred is " + nbPredict
                    + ", svmpred is " + svmPredict + ", kmeanspred is "
                    + kmeansPredict + " ; the decision is : " + predictDecision);
            if (predictDecision.equals(testDocs[i].getCategory())) {
                ++match;
            }
        }
        double accuracy = match / testDocs.length;
        return "" + accuracy;
    }

    /**
     * <p>
     * CreateTime : 2014-12-18
     *
     * @throws IOException
     */
    public static double arrayPercentPredict(VectorProblem testProb,
            IModel model, PredictParameter predictParameter,
            IClassifier svmClassifier) throws IOException {
        double match = 0;
        int length = testProb.l;
        for (int i = 0; i < length; i++) {
            double predict = svmClassifier.predict(testProb.x[i], model,
                    predictParameter);
            if (predict == testProb.y[i]) {
                ++match;
            }
        }
        double accuracy = match / length;
        System.out.println("Total documents number is : " + length
                + " , and right prediction is : " + match + " .");
        return accuracy;
    }

    /**
     * The multi-decision method for the demo.
     *
     * @throws Exception
     * @date 2015-1-29
     */
    public static String multiDecision4Nutch(String title, String content)
            throws Exception {
        String kmeansModelPath = "xml" + File.separator + "model"
                + File.separator + "kmeans_cate.model";
        String nbModelPath = "xml" + File.separator + "model" + File.separator
                + "nb_cate.model";
        String svmModelPath = "xml" + File.separator + "model" + File.separator
                + "svm_cate.model";
        return multiDecision4Nutch(title, content, kmeansModelPath,
                nbModelPath, svmModelPath);
    }

    /**
     * The multi-decision method for the Nutch demo.
     *
     * @throws Exception
     * @date 2014-12-29
     */
    public static String multiDecision4Nutch(String title, String content,
            String kmeansModelPath, String nbModelPath, String svmModelPath)
            throws Exception {
        // Parse the KMeans model.
        XmlModelParser kmeansParser = new KMeansXmlModelParser();
        IModel kmeansModel = kmeansParser.parser(kmeansModelPath);

        // Parse the NB model.
        XmlModelParser nbParser = new NBXmlModelParser();
        IModel nbModel = nbParser.parser(nbModelPath);

        // Parse the SVM model.
        XmlModelParser svmParser = new SvmXmlModelParser();
        IModel svmModel = svmParser.parser(svmModelPath);

        // Initialize the new pending document.
        Documents doc = new Documents();
        doc.setContent(content);
        doc.setTitle(title);
        doc.init(new IKAnalyzer());

        // Build the prediction parameter.
        String predictArgv = "-b 0 sdf sdf sdf";
        PredictParameter predictParameter = new PredictParameter(predictArgv);

        // Predict with each sub-classifier.
        String kmeansPredict = new KmeansCluster().predict(doc, kmeansModel,
                predictParameter);
        String nbPredict = new NbClassifier().predict(doc, nbModel,
                predictParameter);
        String svmPredict = new SvmClassifier().predict(doc, svmModel,
                predictParameter);

        // Multi-decision.
        String predictDecision = null;
        if (kmeansPredict.equals(nbPredict)) {
            predictDecision = kmeansPredict;
        } else if (kmeansPredict.equals(svmPredict)) {
            predictDecision = kmeansPredict;
        } else if (nbPredict.equals(svmPredict)) {
            predictDecision = nbPredict;
        } else {
            // The tie could also be broken by random selection; for
            // reproducibility we fall back to the KMeans prediction.
            predictDecision = kmeansPredict;
        }
        System.out.println("The doc's nbpred is " + nbPredict + ", svmpred is "
                + svmPredict + ", kmeanspred is " + kmeansPredict
                + " ; the decision is : " + predictDecision);
        return predictDecision;
    }

    /**
     * The multi-decision method for the HDFS demo.
     *
     * @throws Exception
     * @date 2014-2-4
     */
    public static String multiDecision4Hdfs(String title, String content,
            String kmeansModelPath, String nbModelPath, String svmModelPath)
            throws Exception {
        // Parse the KMeans model.
        XmlModelParser kmeansParser = new KMeansXmlModelParser();
        IModel kmeansModel = kmeansParser.hdfsParser(kmeansModelPath);

        // Parse the NB model.
        XmlModelParser nbParser = new NBXmlModelParser();
        IModel nbModel = nbParser.hdfsParser(nbModelPath);

        // Parse the SVM model.
        XmlModelParser svmParser = new SvmXmlModelParser();
        IModel svmModel = svmParser.hdfsParser(svmModelPath);

        // Initialize the new pending document.
        Documents doc = new Documents();
        doc.setContent(content);
        doc.setTitle(title);
        doc.init(new IKAnalyzer());

        // Build the prediction parameter.
        String predictArgv = "-b 0 sdf sdf sdf";
        PredictParameter predictParameter = new PredictParameter(predictArgv);

        // Predict with each sub-classifier.
        String kmeansPredict = new KmeansCluster().predict(doc, kmeansModel,
                predictParameter);
        String nbPredict = new NbClassifier().predict(doc, nbModel,
                predictParameter);
        String svmPredict = new SvmClassifier().predict(doc, svmModel,
                predictParameter);

        // Multi-decision.
        String predictDecision = null;
        if (kmeansPredict.equals(nbPredict)) {
            predictDecision = kmeansPredict;
        } else if (kmeansPredict.equals(svmPredict)) {
            predictDecision = kmeansPredict;
        } else if (nbPredict.equals(svmPredict)) {
            predictDecision = nbPredict;
        } else {
            // The tie could also be broken by random selection; for
            // reproducibility we fall back to the KMeans prediction.
            predictDecision = kmeansPredict;
        }
        System.out.println("The doc's nbpred is " + nbPredict + ", svmpred is "
                + svmPredict + ", kmeanspred is " + kmeansPredict
                + " ; the decision is : " + predictDecision);
        return predictDecision;
    }

    /**
     * The multi-decision method for a configurable model path.
     * If the flag is 0, the model files are loaded from the local Jar file;
     * if the flag is 1, the model files are loaded from HDFS.
     *
     * @throws Exception
     * @date 2015-2-3
     */
    public static String multiDecision4Nutch(String title, String content,
            String kmeansModelPath, String nbModelPath, String svmModelPath,
            int flag) throws Exception {
        // If the flag is 0, load the model files from the local Jar file.
        if (flag == 0) {
            return multiDecision4Nutch(title, content, kmeansModelPath,
                    nbModelPath, svmModelPath);
        }
        // If the flag is 1, load the model files from HDFS.
        else if (flag == 1) {
            return multiDecision4Hdfs(title, content, kmeansModelPath,
                    nbModelPath, svmModelPath);
        } else {
            return null;
        }
    }
}
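/*
 * Illustrative sketch (hypothetical, not part of this project): multiDecide()
 * above hard-codes three sub-classifiers. A majority vote over any number of
 * predictions can be written once with a frequency map; on a tie the earliest
 * prediction wins, which matches the deterministic fallback used above.
 */
import java.util.LinkedHashMap;
import java.util.Map;

class MajorityVoteSketch {

    static String majorityDecision(String[] predictions) {
        // LinkedHashMap preserves insertion order, so ties resolve to the
        // first prediction that reached the winning count.
        Map<String, Integer> counts = new LinkedHashMap<>();
        for (String p : predictions) {
            counts.merge(p, 1, Integer::sum);
        }
        String best = predictions[0];
        int bestCount = 0;
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            if (e.getValue() > bestCount) {
                best = e.getKey();
                bestCount = e.getValue();
            }
        }
        return best;
    }

    public static void main(String[] args) {
        // Prints "sports": two of the three voters agree.
        System.out.println(majorityDecision(new String[] { "sports", "news", "sports" }));
    }
}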
package org.apache.lucene.store;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Closeable;
import java.util.Collection; // for javadocs

import org.apache.lucene.util.IOUtils;

/** A Directory is a flat list of files.  Files may be written once, when they
 * are created.  Once a file is created it may only be opened for read, or
 * deleted.  Random access is permitted both when reading and writing.
 *
 * <p> Java's i/o APIs are not used directly, but rather all i/o is
 * through this API.  This permits things such as: <ul>
 * <li> implementation of RAM-based indices;
 * <li> implementation of indices stored in a database, via JDBC;
 * <li> implementation of an index as a single file;
 * </ul>
 *
 * Directory locking is implemented by an instance of {@link
 * LockFactory}, and can be changed for each Directory
 * instance using {@link #setLockFactory}.
 */
public abstract class Directory implements Closeable {

  volatile protected boolean isOpen = true;

  /** Holds the LockFactory instance (implements locking for
   * this Directory instance). */
  protected LockFactory lockFactory;

  /**
   * Returns an array of strings, one for each file in the directory.
   *
   * @throws NoSuchDirectoryException if the directory is not prepared for any
   *         write operations (such as {@link #createOutput(String, IOContext)}).
   * @throws IOException in case of other IO errors
   */
  public abstract String[] listAll() throws IOException;

  /** Returns true iff a file with the given name exists. */
  public abstract boolean fileExists(String name) throws IOException;

  /** Removes an existing file in the directory. */
  public abstract void deleteFile(String name) throws IOException;

  /**
   * Returns the length of a file in the directory. This method follows the
   * following contract:
   * <ul>
   * <li>Throws {@link FileNotFoundException} if the file does not exist
   * <li>Returns a value &ge;0 if the file exists, which specifies its length.
   * </ul>
   *
   * @param name the name of the file for which to return the length.
   * @throws FileNotFoundException if the file does not exist.
   * @throws IOException if there was an IO error while retrieving the file's
   *         length.
   */
  public abstract long fileLength(String name) throws IOException;

  /** Creates a new, empty file in the directory with the given name.
      Returns a stream writing this file. */
  public abstract IndexOutput createOutput(String name, IOContext context)
       throws IOException;

  /**
   * Ensure that any writes to these files are moved to
   * stable storage.  Lucene uses this to properly commit
   * changes to the index, to prevent a machine/OS crash
   * from corrupting the index.<br/>
   * <br/>
   * NOTE: Clients may call this method for the same files over
   * and over again, so some impls might optimize for that.
   * For other impls the operation can be a noop, for various
   * reasons.
   */
  public abstract void sync(Collection<String> names) throws IOException;

  /** Returns a stream reading an existing file, with the
   * specified read buffer size.  The particular Directory
   * implementation may ignore the buffer size.  Currently
   * the only Directory implementations that respect this
   * parameter are {@link FSDirectory} and {@link
   * CompoundFileDirectory}.
   */
  public abstract IndexInput openInput(String name, IOContext context) throws IOException;

  /** Construct a {@link Lock}.
   * @param name the name of the lock file
   */
  public Lock makeLock(String name) {
    return lockFactory.makeLock(name);
  }

  /**
   * Attempt to clear (forcefully unlock and remove) the
   * specified lock.  Only call this at a time when you are
   * certain this lock is no longer in use.
   * @param name name of the lock to be cleared.
   */
  public void clearLock(String name) throws IOException {
    if (lockFactory != null) {
      lockFactory.clearLock(name);
    }
  }

  /** Closes the store. */
  @Override
  public abstract void close() throws IOException;

  /**
   * Set the LockFactory that this Directory instance should
   * use for its locking implementation.  Each instance of
   * LockFactory should only be used for one directory (ie,
   * do not share a single instance across multiple
   * Directories).
   *
   * @param lockFactory instance of {@link LockFactory}.
   */
  public void setLockFactory(LockFactory lockFactory) throws IOException {
    assert lockFactory != null;
    this.lockFactory = lockFactory;
    lockFactory.setLockPrefix(this.getLockID());
  }

  /**
   * Get the LockFactory that this Directory instance is
   * using for its locking implementation.  Note that this
   * may be null for Directory implementations that provide
   * their own locking implementation.
   */
  public LockFactory getLockFactory() {
    return this.lockFactory;
  }

  /**
   * Return a string identifier that uniquely differentiates
   * this Directory instance from other Directory instances.
   * This ID should be the same if two Directory instances
   * (even in different JVMs and/or on different machines)
   * are considered "the same index".  This is how locking
   * "scopes" to the right index.
   */
  public String getLockID() {
    return this.toString();
  }

  @Override
  public String toString() {
    return super.toString() + " lockFactory=" + getLockFactory();
  }

  /**
   * Copies the file <i>src</i> to {@link Directory} <i>to</i> under the new
   * file name <i>dest</i>.
   * <p>
   * If you want to copy the entire source directory to the destination one, you
   * can do so like this:
   *
   * <pre class="prettyprint">
   * Directory to; // the directory to copy to
   * for (String file : dir.listAll()) {
   *   dir.copy(to, file, file, IOContext.DEFAULT); // the third argument may also be a new name
   * }
   * </pre>
   * <p>
   * <b>NOTE:</b> this method does not check whether <i>dest</i> exists and will
   * overwrite it if it does.
   */
  public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
    IndexOutput os = null;
    IndexInput is = null;
    IOException priorException = null;
    try {
      os = to.createOutput(dest, context);
      is = openInput(src, context);
      os.copyBytes(is, is.length());
    } catch (IOException ioe) {
      priorException = ioe;
    } finally {
      boolean success = false;
      try {
        IOUtils.closeWhileHandlingException(priorException, os, is);
        success = true;
      } finally {
        if (!success) {
          try {
            to.deleteFile(dest);
          } catch (Throwable t) {
          }
        }
      }
    }
  }

  /**
   * Creates an {@link IndexInputSlicer} for the given file name.
* IndexInputSlicer allows other {@link Directory} implementations to * efficiently open one or more sliced {@link IndexInput} instances from a * single file handle. The underlying file handle is kept open until the * {@link IndexInputSlicer} is closed. * * @throws IOException * if an {@link IOException} occurs * @lucene.internal * @lucene.experimental */ public IndexInputSlicer createSlicer(final String name, final IOContext context) throws IOException { ensureOpen(); return new IndexInputSlicer() { private final IndexInput base = Directory.this.openInput(name, context); @Override public IndexInput openSlice(String sliceDescription, long offset, long length) { return new SlicedIndexInput("SlicedIndexInput(" + sliceDescription + " in " + base + ")", base, offset, length); } @Override public void close() throws IOException { base.close(); } @Override public IndexInput openFullSlice() { return base.clone(); } }; } /** * @throws AlreadyClosedException if this Directory is closed */ protected final void ensureOpen() throws AlreadyClosedException { if (!isOpen) throw new AlreadyClosedException("this Directory is closed"); } /** * Allows to create one or more sliced {@link IndexInput} instances from a single * file handle. Some {@link Directory} implementations may be able to efficiently map slices of a file * into memory when only certain parts of a file are required. * @lucene.internal * @lucene.experimental */ public abstract class IndexInputSlicer implements Closeable { /** * Returns an {@link IndexInput} slice starting at the given offset with the given length. */ public abstract IndexInput openSlice(String sliceDescription, long offset, long length) throws IOException; /** * Returns an {@link IndexInput} slice starting at offset <i>0</i> with a * length equal to the length of the underlying file * @deprecated Only for reading CFS files from 3.x indexes. */ @Deprecated // can we remove this somehow? public abstract IndexInput openFullSlice() throws IOException; } /** Implementation of an IndexInput that reads from a portion of * a file. */ private static final class SlicedIndexInput extends BufferedIndexInput { IndexInput base; long fileOffset; long length; SlicedIndexInput(final String sliceDescription, final IndexInput base, final long fileOffset, final long length) { this(sliceDescription, base, fileOffset, length, BufferedIndexInput.BUFFER_SIZE); } SlicedIndexInput(final String sliceDescription, final IndexInput base, final long fileOffset, final long length, int readBufferSize) { super("SlicedIndexInput(" + sliceDescription + " in " + base + " slice=" + fileOffset + ":" + (fileOffset+length) + ")", readBufferSize); this.base = base.clone(); this.fileOffset = fileOffset; this.length = length; } @Override public SlicedIndexInput clone() { SlicedIndexInput clone = (SlicedIndexInput)super.clone(); clone.base = base.clone(); clone.fileOffset = fileOffset; clone.length = length; return clone; } /** Expert: implements buffer refill. Reads bytes from the current * position in the input. * @param b the array to read bytes into * @param offset the offset in the array to start storing bytes * @param len the number of bytes to read */ @Override protected void readInternal(byte[] b, int offset, int len) throws IOException { long start = getFilePointer(); if(start + len > length) throw new EOFException("read past EOF: " + this); base.seek(fileOffset + start); base.readBytes(b, offset, len, false); } /** Expert: implements seek. 
Sets current position in this file, where * the next {@link #readInternal(byte[],int,int)} will occur. * @see #readInternal(byte[],int,int) */ @Override protected void seekInternal(long pos) {} /** Closes the stream to further operations. */ @Override public void close() throws IOException { base.close(); } @Override public long length() { return length; } } }
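/*
 * Usage sketch (assumes the Lucene 4.x-era API shown above; paths are
 * placeholders): copying every file from one Directory to another with
 * Directory.copy(), as described in the copy() javadoc. Keeping the same
 * file name on the destination side; a new name may be passed as the
 * third argument instead.
 */
import java.io.File;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;

class DirectoryCopySketch {
  public static void main(String[] args) throws IOException {
    Directory from = FSDirectory.open(new File("/tmp/source-index")); // placeholder path
    Directory to = FSDirectory.open(new File("/tmp/target-index"));   // placeholder path
    try {
      for (String file : from.listAll()) {
        // copy() overwrites an existing destination file of the same name.
        from.copy(to, file, file, IOContext.DEFAULT);
      }
    } finally {
      from.close();
      to.close();
    }
  }
}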
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2014 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.importLogFiles; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.List; import net.sf.json.JSONObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.network.HttpMalformedHeaderException; import org.parosproxy.paros.network.HttpMessage; import org.zaproxy.zap.extension.api.ApiAction; import org.zaproxy.zap.extension.api.ApiException; import org.zaproxy.zap.extension.api.ApiImplementor; import org.zaproxy.zap.extension.api.ApiOther; import org.zaproxy.zap.extension.api.ApiResponse; import org.zaproxy.zap.extension.api.ApiResponseElement; import org.zaproxy.zap.extension.importLogFiles.ExtensionImportLogFiles.LogType; /// This class extends the ImportLog functionality to the ZAP REST API public class ImportLogAPI extends ApiImplementor { private static Logger log = LogManager.getLogger(ImportLogAPI.class); // API method names private static final String PREFIX = "importLogFiles"; private static final String Import_Zap_Log_From_File = "ImportZAPLogFromFile"; private static final String Import_ModSec_Log_From_File = "ImportModSecurityLogFromFile"; private static final String Import_Zap_HttpRequestResponsePair = "ImportZAPHttpRequestResponsePair"; private static final String POST_ModSec_AuditEvent = "PostModSecurityAuditEvent"; private static final String OtherPOST_ModSec_AuditEvent = "OtherPostModSecurityAuditEvent"; // API method parameters private static final String PARAM_FILE = "FilePath"; private static final String PARAM_REQUEST = "HTTPRequest"; private static final String PARAM_RESPONSE = "HTTPResponse"; private static final String PARAM_AuditEventString = "AuditEventString"; // Serverside directory locations private static final String SERVERSIDE_FILEREPOSITORY = org.parosproxy.paros.Constant.getZapHome() + "Imported_Logs"; private static final String ZAP_LOGS_DIR = SERVERSIDE_FILEREPOSITORY + File.separatorChar + "ZAPLogs"; private static final String MOD_SEC_LOGS_DIR = SERVERSIDE_FILEREPOSITORY + File.separatorChar + "ModSecLogs"; // private static String ADDEDFILESDICTIONARY = SERVERSIDE_FILEREPOSITORY + "\\AddedFiles"; private static boolean ZapDirChecked = false; private static boolean ModSecDirChecked = false; // private static boolean DirAddedFilesChecked = false; // Get the existing logging repository for REST retrieval if it exists, if not create it. 
    private static String getLoggingStorageDirectory(LogType logType) {
        if (logType == LogType.ZAP) {
            if (!ZapDirChecked) {
                File directory = new File(ZAP_LOGS_DIR);
                if (!directory.isDirectory()) {
                    directory.mkdirs();
                }
                ZapDirChecked = true;
            }
            return ZAP_LOGS_DIR;
        }
        if (!ModSecDirChecked) {
            File directory = new File(MOD_SEC_LOGS_DIR);
            if (!directory.isDirectory()) {
                directory.mkdirs();
            }
            ModSecDirChecked = true;
        }
        return MOD_SEC_LOGS_DIR;
    }

    /*
    private static String getAddedFilesDictionary() throws IOException {
        while (!DirAddedFilesChecked) {
            File hashes = new File(ADDEDFILESDICTIONARY);
            if (!hashes.isFile()) hashes.createNewFile();
            return hashes.getAbsolutePath();
        }
        return ADDEDFILESDICTIONARY;
    }

    private static void appendAddedFilesHashes(File file) throws IOException {
        BufferedWriter wr = null;
        FileInputStream fs = null;
        try {
            fs = new FileInputStream(file);
            String md5 = DigestUtils.md5Hex(fs);
            wr = new BufferedWriter(new FileWriter(getAddedFilesDictionary()));
            wr.write(md5);
            wr.newLine();
        } finally {
            try {
                if (fs != null) fs.close();
                if (wr != null) wr.close();
            } catch (IOException ex) {
                log.error(ex.getMessage(), ex);
            }
        }
    }

    private static boolean FileAlreadyExists(File file) {
        boolean fileExists = false;
        FileInputStream fs = null;
        BufferedReader br = null;
        try {
            fs = new FileInputStream(file);
            String md5 = DigestUtils.md5Hex(fs);
            // TODO figure out what parts of the file to compare with MD5 as currently its
            // giving different hashes as the metadata is different.
            // Probably have to hash the string[] lines of the file. Also might be worth adding
            // an abstraction on the REST api so that the files are named by the hash.
            String sCurrentLine;
            br = new BufferedReader(new FileReader(getAddedFilesDictionary()));
            while ((sCurrentLine = br.readLine()) != null) {
                if (md5.equals(sCurrentLine)) fileExists = true;
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            try {
                if (fs != null) fs.close();
                if (br != null) br.close();
            } catch (IOException ex) {
                log.error(ex.getMessage(), ex);
            }
        }
        return fileExists;
    }
    */

    /** Provided only for API client generator usage.
     */
    public ImportLogAPI() {
        this(null);
    }

    // Methods to show in the HTTP API view
    public ImportLogAPI(ExtensionImportLogFiles extensionImportLogFiles) {
        this.addApiAction(new ApiAction(Import_Zap_Log_From_File, new String[] {PARAM_FILE}));
        this.addApiAction(new ApiAction(Import_ModSec_Log_From_File, new String[] {PARAM_FILE}));
        this.addApiAction(
                new ApiAction(
                        Import_Zap_HttpRequestResponsePair,
                        new String[] {PARAM_REQUEST, PARAM_RESPONSE}));
        this.addApiAction(
                new ApiAction(POST_ModSec_AuditEvent, null, new String[] {PARAM_AuditEventString}));
        this.addApiOthers(
                new ApiOther(OtherPOST_ModSec_AuditEvent, new String[] {PARAM_AuditEventString}));
    }

    @Override
    public HttpMessage handleApiOther(HttpMessage msg, String name, JSONObject params) {
        ExtensionImportLogFiles importer = new ExtensionImportLogFiles();
        if (OtherPOST_ModSec_AuditEvent.equals(name)) {
            String trimmed =
                    params.getString("POSTBODY")
                            .replaceFirst("zapapiformat=JSON&AuditEventString=", "");
            String filename = java.util.UUID.randomUUID().toString() + ".txt";
            try {
                // TODO - this doesn't work as the source needs to be a local file
                processLogs(filename, importer, LogType.MOD_SECURITY_2, trimmed);
            } catch (Exception ex) {
                log.error(ex.getMessage(), ex);
            }
        }
        return null;
    }

    @Override
    public ApiResponse handleApiAction(String name, JSONObject params) throws ApiException {
        ExtensionImportLogFiles importer = new ExtensionImportLogFiles();
        if (Import_Zap_Log_From_File.equals(name))
            return processLogsFromFile(params.getString(PARAM_FILE), importer, LogType.ZAP);
        if (Import_ModSec_Log_From_File.equals(name))
            return processLogsFromFile(
                    params.getString(PARAM_FILE), importer, LogType.MOD_SECURITY_2);
        if (Import_Zap_HttpRequestResponsePair.equals(name)) {
            try {
                List<HttpMessage> messages =
                        importer.getHttpMessageFromPair(
                                params.getString(PARAM_REQUEST), params.getString(PARAM_RESPONSE));
                return processRequestResponsePair(messages, importer);
            } catch (HttpMalformedHeaderException e) {
                String errMessage = "Failed - " + e.getMessage();
                return new ApiResponseElement("Parsing log files to ZAPs site tree", errMessage);
            }
        }
        // TODO - Need to add functionality to handle the POSTBody processing at some level of the
        // implementation.
        if (POST_ModSec_AuditEvent.equals(name)) {
            // TODO - figure out how best to add the post and where the params should be set
            String trimmed =
                    params.getString("POSTBODY")
                            .replaceFirst("zapapiformat=JSON&AuditEventString=", "");
            String filename = java.util.UUID.randomUUID().toString() + ".txt";
            try {
                // TODO - this doesn't work as the source needs to be a local file
                return processLogs(filename, importer, LogType.MOD_SECURITY_2, trimmed);
            } catch (Exception ex) {
                String errMessage = "Failed - " + ex.getMessage();
                return new ApiResponseElement(
                        "Parsing audit event log to ZAPs site tree", errMessage);
            }
        }
        return new ApiResponseElement("Requested Method", "Failed - Method Not Found");
    }

    public static ApiResponseElement processLogsFromFile(
            String filePath, ExtensionImportLogFiles importer, LogType logType) {
        return processLogs(filePath, importer, logType, null);
    }

    /**
     * This method creates a file in the application data folder and streams the input from the
     * ZAP API to it, depending on how it receives the data (HTTP POST or direct file reference).
     *
     * @param filePath in the case of the HTTP POST this is just a guid created from
     *     pre-processing.
     * @param importer the extension used to parse and import the log entries
     * @param logType either ZAP or ModSec currently
     * @param httpPOSTData if this is null it signifies to read from the given filePath
     * @return the API response describing the outcome
     */
    private static ApiResponseElement processLogs(
            String filePath, ExtensionImportLogFiles importer, LogType logType,
            String httpPOSTData) {
        // Not appending the file with client state info as REST should produce a resource based
        // on the request indefinitely.
        String targetFileName = new File(filePath).getName();
        int extensionIndex = targetFileName.lastIndexOf('.');
        if (extensionIndex >= 0) {
            targetFileName = targetFileName.substring(0, extensionIndex);
        }
        targetFileName += ".txt";
        File targetFile = new File(getLoggingStorageDirectory(logType), targetFileName);
        if (targetFile.isFile()) {
            return new ApiResponseElement(
                    "Parsing logs files to ZAPs site tree", "Not processed - File already added");
        }

        if (httpPOSTData == null) {
            try (BufferedReader br = new BufferedReader(new FileReader(filePath));
                    BufferedWriter wr = new BufferedWriter(new FileWriter(targetFile))) {
                String sCurrentLine;
                while ((sCurrentLine = br.readLine()) != null) {
                    wr.write(sCurrentLine);
                    wr.newLine();
                }
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        } else {
            try (FileOutputStream fop = new FileOutputStream(targetFile)) {
                byte[] contentInBytes = httpPOSTData.getBytes();
                fop.write(contentInBytes);
                fop.flush();
            } catch (IOException ex) {
                log.error(ex.getMessage(), ex);
            }
        }
        importer.processInput(targetFile, logType);
        return new ApiResponseElement("Parsing log files to ZAPs site tree", "Succeeded");
    }

    private static ApiResponseElement processRequestResponsePair(
            List<HttpMessage> messages, ExtensionImportLogFiles importer) {
        try {
            importer.addToTree(importer.getHistoryRefs(messages));
            return new ApiResponseElement("Parsing log files to ZAPs site tree", "Succeeded");
        } catch (HttpMalformedHeaderException httpex) {
            String errMessage = String.format("Failed - %s", httpex.getLocalizedMessage());
            return new ApiResponseElement("Parsing log files to ZAPs site tree", errMessage);
        } catch (Exception e) {
            String errMessage = String.format("Failed - %s", e.getLocalizedMessage());
            return new ApiResponseElement("Parsing log files to ZAPs site tree", errMessage);
        }
    }

    @Override
    public String getPrefix() {
        return PREFIX;
    }
}
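/*
 * Usage sketch (hypothetical client, not shipped with this add-on): the
 * actions registered above are reachable through ZAP's standard JSON API URL
 * scheme, /JSON/<prefix>/action/<action name>/. Host, port and file path
 * below are placeholders, and depending on the ZAP configuration an API key
 * parameter may also be required.
 */
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

class ImportLogClientSketch {
    public static void main(String[] args) throws Exception {
        String filePath = URLEncoder.encode("/tmp/session.log", "UTF-8"); // placeholder
        URL url = new URL("http://localhost:8080/JSON/importLogFiles/action/"
                + "ImportZAPLogFromFile/?FilePath=" + filePath);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // JSON response from the API
            }
        } finally {
            conn.disconnect();
        }
    }
}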
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.vxquery.metadata; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.InetAddress; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.FileSplit; import org.apache.hyracks.api.client.NodeControllerInfo; import org.apache.hyracks.api.comm.IFrame; import org.apache.hyracks.api.comm.IFrameFieldAppender; import org.apache.hyracks.api.comm.VSizeFrame; import org.apache.hyracks.api.context.IHyracksTaskContext; import org.apache.hyracks.api.dataflow.IOperatorNodePushable; import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider; import org.apache.hyracks.api.dataflow.value.RecordDescriptor; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.job.IOperatorDescriptorRegistry; import org.apache.hyracks.data.std.util.ArrayBackedValueStorage; import org.apache.hyracks.dataflow.common.comm.io.FrameFixedFieldTupleAppender; import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor; import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor; import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable; import org.apache.hyracks.hdfs.ContextFactory; import org.apache.hyracks.hdfs2.dataflow.FileSplitsFactory; import org.apache.vxquery.context.DynamicContext; import org.apache.vxquery.hdfs2.HDFSFunctions; import org.apache.vxquery.jsonparser.JSONParser; import org.apache.vxquery.xmlparser.ITreeNodeIdProvider; import org.apache.vxquery.xmlparser.TreeNodeIdProvider; import org.apache.vxquery.xmlparser.XMLParser; public class VXQueryCollectionOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor { private static final long serialVersionUID = 1L; private short dataSourceId; private short totalDataSources; private String[] collectionPartitions; private 
List<Integer> childSeq; private List<Byte[]> valueSeq; protected static final Logger LOGGER = Logger.getLogger(VXQueryCollectionOperatorDescriptor.class.getName()); private HDFSFunctions hdfs; private String tag; private static final String START_TAG = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"; private final String hdfsConf; private final Map<String, NodeControllerInfo> nodeControllerInfos; public VXQueryCollectionOperatorDescriptor(IOperatorDescriptorRegistry spec, AbstractVXQueryDataSource ds, RecordDescriptor rDesc, String hdfsConf, Map<String, NodeControllerInfo> nodeControllerInfos) { super(spec, 1, 1); collectionPartitions = ds.getPartitions(); dataSourceId = (short) ds.getDataSourceId(); totalDataSources = (short) ds.getTotalDataSources(); childSeq = ds.getChildSeq(); valueSeq = ds.getValueSeq(); recordDescriptors[0] = rDesc; this.tag = ds.getTag(); this.hdfsConf = hdfsConf; this.nodeControllerInfos = nodeControllerInfos; } @Override public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException { final FrameTupleAccessor fta = new FrameTupleAccessor( recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)); final int fieldOutputCount = recordDescProvider.getOutputRecordDescriptor(getActivityId(), 0).getFieldCount(); final IFrame frame = new VSizeFrame(ctx); final IFrameFieldAppender appender = new FrameFixedFieldTupleAppender(fieldOutputCount); final short partitionId = (short) ctx.getTaskAttemptId().getTaskId().getPartition(); final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider(partitionId, dataSourceId, totalDataSources); final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId(); final DynamicContext dCtx = (DynamicContext) ctx.getJobletContext().getGlobalJobData(); final ArrayBackedValueStorage jsonAbvs = new ArrayBackedValueStorage(); final String collectionName = collectionPartitions[partition % collectionPartitions.length]; final XMLParser parser = new XMLParser(false, nodeIdProvider, nodeId, appender, childSeq, dCtx.getStaticContext()); final JSONParser jparser = new JSONParser(valueSeq); return new AbstractUnaryInputUnaryOutputOperatorNodePushable() { @Override public void open() throws HyracksDataException { appender.reset(frame, true); writer.open(); hdfs = new HDFSFunctions(nodeControllerInfos, hdfsConf); } @Override public void nextFrame(ByteBuffer buffer) throws HyracksDataException { fta.reset(buffer); String collectionModifiedName = collectionName.replace("${nodeId}", nodeId); if (!collectionModifiedName.contains("hdfs:/")) { File collectionDirectory = new File(collectionModifiedName); // check if directory is in the local file system if (collectionDirectory.exists()) { // Go through each tuple. 
if (collectionDirectory.isDirectory()) { xmlAndJsonCollection(collectionDirectory); } else { throw new HyracksDataException("Invalid directory parameter (" + nodeId + ":" + collectionDirectory.getAbsolutePath() + ") passed to collection."); } } } else { // Else check in HDFS file system // Get instance of the HDFS filesystem FileSystem fs = hdfs.getFileSystem(); if (fs != null) { collectionModifiedName = collectionModifiedName.replaceAll("hdfs:/", ""); Path directory = new Path(collectionModifiedName); Path xmlDocument; if (tag != null) { hdfs.setJob(directory.toString(), tag); tag = "<" + tag + ">"; Job job = hdfs.getJob(); InputFormat inputFormat = hdfs.getinputFormat(); try { hdfs.scheduleSplits(); ArrayList<Integer> schedule = hdfs .getScheduleForNode(InetAddress.getLocalHost().getHostAddress()); List<InputSplit> splits = hdfs.getSplits(); List<FileSplit> fileSplits = new ArrayList<>(); for (int i : schedule) { fileSplits.add((FileSplit) splits.get(i)); } FileSplitsFactory splitsFactory = new FileSplitsFactory(fileSplits); List<FileSplit> inputSplits = splitsFactory.getSplits(); ContextFactory ctxFactory = new ContextFactory(); int size = inputSplits.size(); InputStream stream; String value; RecordReader reader; TaskAttemptContext context; for (int i = 0; i < size; i++) { // read split context = ctxFactory.createContext(job.getConfiguration(), i); reader = inputFormat.createRecordReader(inputSplits.get(i), context); reader.initialize(inputSplits.get(i), context); while (reader.nextKeyValue()) { value = reader.getCurrentValue().toString(); // Split value if it contains more than // one item with the tag if (StringUtils.countMatches(value, tag) > 1) { String[] items = value.split(tag); for (String item : items) { if (item.length() > 0) { item = START_TAG + tag + item; stream = new ByteArrayInputStream( item.getBytes(StandardCharsets.UTF_8)); parser.parseHDFSElements(stream, writer, fta, i); stream.close(); } } } else { value = START_TAG + value; // create an input stream to the // file currently reading and send // it to parser stream = new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)); parser.parseHDFSElements(stream, writer, fta, i); stream.close(); } } reader.close(); } } catch (Exception e) { throw new HyracksDataException(e); } } else { try { // check if the path exists and is a directory if (fs.exists(directory) && fs.isDirectory(directory)) { for (int tupleIndex = 0; tupleIndex < fta.getTupleCount(); ++tupleIndex) { // read every file in the directory RemoteIterator<LocatedFileStatus> it = fs.listFiles(directory, true); while (it.hasNext()) { xmlDocument = it.next().getPath(); if (fs.isFile(xmlDocument)) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine( "Starting to read XML document: " + xmlDocument.getName()); } // create an input stream to the // file currently reading and // send it to parser InputStream in = fs.open(xmlDocument).getWrappedStream(); parser.parseHDFSElements(in, writer, fta, tupleIndex); in.close(); } } } } else { throw new HyracksDataException("Invalid HDFS directory parameter (" + nodeId + ":" + directory + ") passed to collection."); } } catch (Exception e) { throw new HyracksDataException(e); } } try { fs.close(); } catch (Exception e) { throw new HyracksDataException(e); } } } } public void xmlAndJsonCollection(File directory) throws HyracksDataException { Reader input; for (int tupleIndex = 0; tupleIndex < fta.getTupleCount(); ++tupleIndex) { Iterator<File> it = FileUtils.iterateFiles(directory, new VXQueryIOFileFilter(), 
TrueFileFilter.INSTANCE); while (it.hasNext()) { File file = it.next(); String fileName = file.getName().toLowerCase(); if (fileName.endsWith(".xml")) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Starting to read XML document: " + file.getAbsolutePath()); } parser.parseElements(file, writer, tupleIndex); } else if (fileName.endsWith(".json")) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Starting to read JSON document: " + file.getAbsolutePath()); } try { jsonAbvs.reset(); input = new InputStreamReader(new FileInputStream(file)); jparser.parse(input, jsonAbvs, writer, appender); } catch (FileNotFoundException e) { throw new HyracksDataException(e.toString()); } } } } } @Override public void fail() throws HyracksDataException { writer.fail(); } @Override public void close() throws HyracksDataException { // Check if needed? if (appender.getTupleCount() > 0) { appender.flush(writer); } writer.close(); } }; } }
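/*
 * Illustrative sketch (standalone, not part of VXQuery): the HDFS branch in
 * nextFrame() above splits a record containing several items of the same tag
 * and re-prefixes each fragment with the XML declaration and the opening tag
 * before handing it to the parser. The same reassembly in isolation:
 */
import java.util.ArrayList;
import java.util.List;

class TagSplitSketch {
    private static final String START_TAG =
            "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n";

    static List<String> splitByTag(String value, String tagName) {
        String tag = "<" + tagName + ">";
        List<String> documents = new ArrayList<>();
        // split() drops the opening tag, so it is prepended again together
        // with the XML declaration to yield a well-formed document fragment.
        for (String item : value.split(tag)) {
            if (item.length() > 0) {
                documents.add(START_TAG + tag + item);
            }
        }
        return documents;
    }

    public static void main(String[] args) {
        // Prints two complete single-root documents, one per <book> item.
        for (String doc : splitByTag("<book>A</book><book>B</book>", "book")) {
            System.out.println(doc);
        }
    }
}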
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.io; import com.facebook.buck.config.Config; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.io.windowsfs.WindowsFS; import com.facebook.buck.util.BuckConstant; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.autosparse.AutoSparseConfig; import com.facebook.buck.util.environment.Platform; import com.facebook.buck.util.sha1.Sha1HashCode; import com.facebook.buck.zip.CustomZipEntry; import com.facebook.buck.zip.CustomZipOutputStream; import com.facebook.buck.zip.ZipOutputStreams; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.UnmodifiableIterator; import com.google.common.hash.Hashing; import com.google.common.io.ByteStreams; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.channels.Channels; import java.nio.file.CopyOption; import java.nio.file.DirectoryStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystem; import java.nio.file.FileSystemLoopException; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.FileTime; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayDeque; import java.util.Collection; import java.util.Comparator; import java.util.EnumSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.regex.Pattern; import javax.annotation.Nullable; /** An injectable service for interacting with the filesystem relative to the 
project root. */ public class ProjectFilesystem { private final boolean windowsSymlinks; /** Controls the behavior of how the source should be treated when copying. */ public enum CopySourceMode { /** Copy the single source file into the destination path. */ FILE, /** * Treat the source as a directory and copy each file inside it to the destination path, which * must be a directory. */ DIRECTORY_CONTENTS_ONLY, /** * Treat the source as a directory. Copy the directory and its contents to the destination path, * which must be a directory. */ DIRECTORY_AND_CONTENTS, } // A non-exhaustive list of characters that might indicate that we're about to deal with a glob. private static final Pattern GLOB_CHARS = Pattern.compile("[\\*\\?\\{\\[]"); private static final Path EDEN_MAGIC_PATH_ELEMENT = Paths.get(".eden"); @VisibleForTesting static final String BUCK_BUCKD_DIR_KEY = "buck.buckd_dir"; private final Path projectRoot; private final BuckPaths buckPaths; private final ImmutableSet<PathOrGlobMatcher> blackListedPaths; private final ImmutableSet<PathOrGlobMatcher> blackListedDirectories; /** Supplier that returns an absolute path that is guaranteed to exist. */ private final Supplier<Path> tmpDir; private final ProjectFilesystemDelegate delegate; // Defaults to false, and so paths should be valid. @VisibleForTesting protected boolean ignoreValidityOfPaths; public ProjectFilesystem(Path root) throws InterruptedException { this(root, new Config()); } public static ProjectFilesystem createNewOrThrowHumanReadableException(Path path) throws InterruptedException { try { // toRealPath() is necessary to resolve symlinks, allowing us to later // check whether files are inside or outside of the project without issue. return new ProjectFilesystem(path.toRealPath().normalize()); } catch (IOException e) { throw new HumanReadableException( String.format( ("Failed to resolve project root [%s]." + "Check if it exists and has the right permissions."), path.toAbsolutePath()), e); } } /** * This constructor is restricted to {@code protected} because it is generally best to let {@link * ProjectFilesystemDelegateFactory#newInstance(Path, Path, String, AutoSparseConfig)} create an * appropriate delegate. Currently, the only case in which we need to override this behavior is in * unit tests. */ protected ProjectFilesystem( Path root, ProjectFilesystemDelegate delegate, boolean windowsSymlinks) { this( root.getFileSystem(), root, ImmutableSet.of(), getDefaultBuckPaths(root), delegate, windowsSymlinks); } public ProjectFilesystem(Path root, Config config) throws InterruptedException { this( root.getFileSystem(), root, extractIgnorePaths(root, config, getConfiguredBuckPaths(root, config)), getConfiguredBuckPaths(root, config), ProjectFilesystemDelegateFactory.newInstance( root, getConfiguredBuckPaths(root, config).getBuckOut(), config.getValue("version_control", "hg_cmd").orElse("hg"), AutoSparseConfig.of(config)), config.getBooleanValue("project", "windows_symlinks", false)); } /** * For testing purposes, subclasses might want to skip some of the verification done by the * constructor on its arguments. 
*/ protected boolean shouldVerifyConstructorArguments() { return true; } private ProjectFilesystem( FileSystem vfs, final Path root, ImmutableSet<PathOrGlobMatcher> blackListedPaths, BuckPaths buckPaths, ProjectFilesystemDelegate delegate, boolean windowsSymlinks) { if (shouldVerifyConstructorArguments()) { Preconditions.checkArgument(Files.isDirectory(root), "%s must be a directory", root); Preconditions.checkState(vfs.equals(root.getFileSystem())); Preconditions.checkArgument(root.isAbsolute()); } this.projectRoot = MorePaths.normalize(root); this.delegate = delegate; this.ignoreValidityOfPaths = false; this.blackListedPaths = FluentIterable.from(blackListedPaths) .append( FluentIterable.from( // "Path" is Iterable, so avoid adding each segment. // We use the default value here because that's what we've always done. ImmutableSet.of( getCacheDir( root, Optional.of(buckPaths.getCacheDir().toString()), buckPaths))) .append(ImmutableSet.of(buckPaths.getTrashDir())) .transform(PathOrGlobMatcher::new)) .toSet(); this.buckPaths = buckPaths; this.blackListedDirectories = FluentIterable.from(this.blackListedPaths) .filter(matcher -> matcher.getType() == PathOrGlobMatcher.Type.PATH) .transform( matcher -> { Path path = matcher.getPath(); ImmutableSet<Path> filtered = MorePaths.filterForSubpaths(ImmutableSet.of(path), root); if (filtered.isEmpty()) { return path; } return Iterables.getOnlyElement(filtered); }) // TODO(#10068334) So we claim to ignore this path to preserve existing behaviour, but we // really don't end up ignoring it in reality (see extractIgnorePaths). .append(ImmutableSet.of(buckPaths.getBuckOut())) .transform(PathOrGlobMatcher::new) .append( Iterables.filter( this.blackListedPaths, input -> input.getType() == PathOrGlobMatcher.Type.GLOB)) .toSet(); this.tmpDir = Suppliers.memoize( () -> { Path relativeTmpDir = ProjectFilesystem.this.buckPaths.getTmpDir(); try { mkdirs(relativeTmpDir); } catch (IOException e) { throw new RuntimeException(e); } return relativeTmpDir; }); this.windowsSymlinks = windowsSymlinks; } private static BuckPaths getDefaultBuckPaths(Path rootPath) { return BuckPaths.of( rootPath.getFileSystem().getPath(BuckConstant.getBuckOutputPath().toString())); } private static BuckPaths getConfiguredBuckPaths(Path rootPath, Config config) { BuckPaths buckPaths = getDefaultBuckPaths(rootPath); Optional<String> configuredBuckOut = config.getValue("project", "buck_out"); if (configuredBuckOut.isPresent()) { buckPaths = buckPaths.withConfiguredBuckOut( rootPath.getFileSystem().getPath(configuredBuckOut.get())); } return buckPaths; } private static Path getCacheDir(Path root, Optional<String> value, BuckPaths buckPaths) { String cacheDir = value.orElse(root.resolve(buckPaths.getCacheDir()).toString()); Path toReturn = root.getFileSystem().getPath(cacheDir); toReturn = MorePaths.expandHomeDir(toReturn); if (toReturn.isAbsolute()) { return toReturn; } ImmutableSet<Path> filtered = MorePaths.filterForSubpaths(ImmutableSet.of(toReturn), root); if (filtered.isEmpty()) { // OK. For some reason the relative path managed to be out of our directory. 
return toReturn; } return Iterables.getOnlyElement(filtered); } private static ImmutableSet<PathOrGlobMatcher> extractIgnorePaths( final Path root, Config config, final BuckPaths buckPaths) { ImmutableSet.Builder<PathOrGlobMatcher> builder = ImmutableSet.builder(); builder.add(new PathOrGlobMatcher(root, ".idea")); final String projectKey = "project"; final String ignoreKey = "ignore"; String buckdDirProperty = System.getProperty(BUCK_BUCKD_DIR_KEY, ".buckd"); if (!Strings.isNullOrEmpty(buckdDirProperty)) { builder.add(new PathOrGlobMatcher(root, buckdDirProperty)); } Path cacheDir = getCacheDir(root, config.getValue("cache", "dir"), buckPaths); builder.add(new PathOrGlobMatcher(cacheDir)); builder.addAll( FluentIterable.from(config.getListWithoutComments(projectKey, ignoreKey)) .transform( new Function<String, PathOrGlobMatcher>() { @Nullable @Override public PathOrGlobMatcher apply(String input) { // We don't really want to ignore the output directory when doing things like filesystem // walks, so return null if (buckPaths.getBuckOut().toString().equals(input)) { return null; //root.getFileSystem().getPathMatcher("glob:**"); } if (GLOB_CHARS.matcher(input).find()) { return new PathOrGlobMatcher( root.getFileSystem().getPathMatcher("glob:" + input), input); } return new PathOrGlobMatcher(root, input); } }) // And now remove any null patterns .filter(Objects::nonNull) .toList()); return builder.build(); } public final Path getRootPath() { return projectRoot; } /** * Hook for virtual filesystems to materialise virtual files as Buck will need to be able to read * them past this point. */ public void ensureConcreteFilesExist(BuckEventBus eventBus) { delegate.ensureConcreteFilesExist(eventBus); } /** * @return the specified {@code path} resolved against {@link #getRootPath()} to an absolute path. */ public Path resolve(Path path) { return MorePaths.normalize(getPathForRelativePath(path).toAbsolutePath()); } public Path resolve(String path) { return MorePaths.normalize(getRootPath().resolve(path).toAbsolutePath()); } /** Construct a relative path between the project root and a given path. */ public Path relativize(Path path) { return projectRoot.relativize(path); } /** @return A {@link ImmutableSet} of {@link PathOrGlobMatcher} objects to have buck ignore. */ public ImmutableSet<PathOrGlobMatcher> getIgnorePaths() { return blackListedDirectories; } public Path getPathForRelativePath(Path pathRelativeToProjectRoot) { return delegate.getPathForRelativePath(pathRelativeToProjectRoot); } public Path getPathForRelativePath(String pathRelativeToProjectRoot) { return projectRoot.resolve(pathRelativeToProjectRoot); } /** * @param path Absolute path or path relative to the project root. * @return If {@code path} is relative, it is returned. If it is absolute and is inside the * project root, it is relativized to the project root and returned. Otherwise an absent value * is returned. */ public Optional<Path> getPathRelativeToProjectRoot(Path path) { path = MorePaths.normalize(path); if (path.isAbsolute()) { if (path.startsWith(projectRoot)) { return Optional.of(MorePaths.relativize(projectRoot, path)); } else { return Optional.empty(); } } else { return Optional.of(path); } } /** * As {@link #getPathForRelativePath(java.nio.file.Path)}, but with the added twist that the * existence of the path is checked before returning. 
*/ public Path getPathForRelativeExistingPath(Path pathRelativeToProjectRoot) { Path file = getPathForRelativePath(pathRelativeToProjectRoot); if (ignoreValidityOfPaths) { return file; } if (exists(file)) { return file; } // TODO(mbolin): Eliminate this temporary exemption for symbolic links. if (isSymLink(file)) { return file; } throw new RuntimeException( String.format("Not an ordinary file: '%s'.", pathRelativeToProjectRoot)); } public boolean exists(Path pathRelativeToProjectRoot, LinkOption... options) { return delegate.exists(pathRelativeToProjectRoot, options); } public long getFileSize(Path pathRelativeToProjectRoot) throws IOException { Path path = getPathForRelativePath(pathRelativeToProjectRoot); if (!Files.isRegularFile(path)) { throw new IOException("Cannot get size of " + path + " because it is not an ordinary file."); } return Files.size(path); } /** * Deletes a file specified by its path relative to the project root. * * <p>Ignores the failure if the file does not exist. * * @param pathRelativeToProjectRoot path to the file * @return {@code true} if the file was deleted, {@code false} if it did not exist */ public boolean deleteFileAtPathIfExists(Path pathRelativeToProjectRoot) throws IOException { return Files.deleteIfExists(getPathForRelativePath(pathRelativeToProjectRoot)); } /** * Deletes a file specified by its path relative to the project root. * * @param pathRelativeToProjectRoot path to the file */ public void deleteFileAtPath(Path pathRelativeToProjectRoot) throws IOException { Files.delete(getPathForRelativePath(pathRelativeToProjectRoot)); } public Properties readPropertiesFile(Path propertiesFile) throws IOException { Properties properties = new Properties(); if (exists(propertiesFile)) { try (BufferedReader reader = new BufferedReader( new InputStreamReader(newFileInputStream(propertiesFile), Charsets.UTF_8))) { properties.load(reader); } return properties; } else { throw new FileNotFoundException(propertiesFile.toString()); } } /** Checks whether there is a normal file at the specified path. */ public boolean isFile(Path pathRelativeToProjectRoot, LinkOption... options) { return Files.isRegularFile(getPathForRelativePath(pathRelativeToProjectRoot), options); } public boolean isHidden(Path pathRelativeToProjectRoot) throws IOException { return Files.isHidden(getPathForRelativePath(pathRelativeToProjectRoot)); } /** * Similar to {@link #walkFileTree(Path, FileVisitor)} except this takes in a path relative to the * project root. */ public void walkRelativeFileTree( Path pathRelativeToProjectRoot, final FileVisitor<Path> fileVisitor) throws IOException { walkRelativeFileTree( pathRelativeToProjectRoot, EnumSet.of(FileVisitOption.FOLLOW_LINKS), fileVisitor); } /** Walks a project-root relative file tree with a visitor and visit options. */ public void walkRelativeFileTree( Path pathRelativeToProjectRoot, EnumSet<FileVisitOption> visitOptions, final FileVisitor<Path> fileVisitor) throws IOException { FileVisitor<Path> relativizingVisitor = new FileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { // TODO(mbolin): We should not have hardcoded logic for Eden here. Instead, we should // properly handle cyclic symlinks in a general way. // Failure to perform this check will result in a java.nio.file.FileSystemLoopException // in Eden. 
if (EDEN_MAGIC_PATH_ELEMENT.equals(dir.getFileName())) { return FileVisitResult.SKIP_SUBTREE; } return fileVisitor.preVisitDirectory(relativize(dir), attrs); } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { return fileVisitor.visitFile(relativize(file), attrs); } @Override public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { return fileVisitor.visitFileFailed(relativize(file), exc); } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { return fileVisitor.postVisitDirectory(relativize(dir), exc); } }; Path rootPath = getPathForRelativePath(pathRelativeToProjectRoot); walkFileTree(rootPath, visitOptions, relativizingVisitor); } /** Allows {@link Files#walkFileTree} to be faked in tests. */ public void walkFileTree(Path root, FileVisitor<Path> fileVisitor) throws IOException { root = getPathForRelativePath(root); walkFileTree(root, EnumSet.noneOf(FileVisitOption.class), fileVisitor); } public void walkFileTree(Path root, Set<FileVisitOption> options, FileVisitor<Path> fileVisitor) throws IOException { new FileTreeWalker(root, options, fileVisitor).walk(); } public ImmutableSet<Path> getFilesUnderPath(Path pathRelativeToProjectRoot) throws IOException { return getFilesUnderPath(pathRelativeToProjectRoot, x -> true); } public ImmutableSet<Path> getFilesUnderPath( Path pathRelativeToProjectRoot, Predicate<Path> predicate) throws IOException { return getFilesUnderPath( pathRelativeToProjectRoot, predicate, EnumSet.of(FileVisitOption.FOLLOW_LINKS)); } public ImmutableSet<Path> getFilesUnderPath( Path pathRelativeToProjectRoot, final Predicate<Path> predicate, EnumSet<FileVisitOption> visitOptions) throws IOException { final ImmutableSet.Builder<Path> paths = ImmutableSet.builder(); walkRelativeFileTree( pathRelativeToProjectRoot, visitOptions, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path path, BasicFileAttributes attributes) { if (predicate.apply(path)) { paths.add(path); } return FileVisitResult.CONTINUE; } }); return paths.build(); } /** Allows {@link Files#isDirectory} to be faked in tests. */ public boolean isDirectory(Path child, LinkOption... linkOptions) { return Files.isDirectory(resolve(child), linkOptions); } /** Allows {@link Files#isExecutable} to be faked in tests. */ public boolean isExecutable(Path child) { return delegate.isExecutable(child); } public ImmutableCollection<Path> getDirectoryContents(Path pathToUse) throws IOException { Path path = getPathForRelativePath(pathToUse); try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) { return FluentIterable.from(stream) .filter(input -> !isIgnored(relativize(input))) .transform(absolutePath -> MorePaths.relativize(projectRoot, absolutePath)) .toSortedList(Comparator.naturalOrder()); } } @VisibleForTesting protected PathListing.PathModifiedTimeFetcher getLastModifiedTimeFetcher() { return path -> ProjectFilesystem.this.getLastModifiedTime(path); } /** * Returns the files inside {@code pathRelativeToProjectRoot} which match {@code globPattern}, * ordered in descending last modified time order. This will not obey the results of {@link * #isIgnored(Path)}. 
*/ public ImmutableSortedSet<Path> getMtimeSortedMatchingDirectoryContents( Path pathRelativeToProjectRoot, String globPattern) throws IOException { Path path = getPathForRelativePath(pathRelativeToProjectRoot); return PathListing.listMatchingPaths(path, globPattern, getLastModifiedTimeFetcher()); } public FileTime getLastModifiedTime(Path pathRelativeToProjectRoot) throws IOException { Path path = getPathForRelativePath(pathRelativeToProjectRoot); return Files.getLastModifiedTime(path); } /** Sets the last modified time for the given path. */ public Path setLastModifiedTime(Path pathRelativeToProjectRoot, FileTime time) throws IOException { Path path = getPathForRelativePath(pathRelativeToProjectRoot); return Files.setLastModifiedTime(path, time); } /** * Recursively delete everything under the specified path. Ignore the failure if the file at the * specified path does not exist. */ public void deleteRecursivelyIfExists(Path pathRelativeToProjectRoot) throws IOException { MoreFiles.deleteRecursivelyIfExists(resolve(pathRelativeToProjectRoot)); } /** * Resolves the relative path against the project root and then calls {@link * Files#createDirectories(java.nio.file.Path, java.nio.file.attribute.FileAttribute[])} */ public void mkdirs(Path pathRelativeToProjectRoot) throws IOException { Path resolved = resolve(pathRelativeToProjectRoot); try { Files.createDirectories(resolved); } catch (FileAlreadyExistsException e) { // Don't complain if the file is a symlink that points to a valid directory. // This check is done only on exception as it's a rare case, and lstat is not free. if (!Files.isDirectory(resolved)) { throw e; } } } /** Creates a new file relative to the project root. */ public Path createNewFile(Path pathRelativeToProjectRoot) throws IOException { Path path = getPathForRelativePath(pathRelativeToProjectRoot); return Files.createFile(path); } /** * // @deprecated Prefer operating on {@code Path}s directly, replaced by {@link * #createParentDirs(java.nio.file.Path)}. */ public void createParentDirs(String pathRelativeToProjectRoot) throws IOException { Path file = getPathForRelativePath(pathRelativeToProjectRoot); mkdirs(file.getParent()); } /** * @param pathRelativeToProjectRoot Must identify a file, not a directory. (Unfortunately, we have * no way to assert this because the path is not expected to exist yet.) */ public void createParentDirs(Path pathRelativeToProjectRoot) throws IOException { Path file = resolve(pathRelativeToProjectRoot); Path directory = file.getParent(); mkdirs(directory); } /** * Writes each line in {@code lines} with a trailing newline to a file at the specified path. * * <p>The parent path of {@code pathRelativeToProjectRoot} must exist. */ public void writeLinesToPath( Iterable<String> lines, Path pathRelativeToProjectRoot, FileAttribute<?>... attrs) throws IOException { try (Writer writer = new BufferedWriter( new OutputStreamWriter( newFileOutputStream(pathRelativeToProjectRoot, attrs), Charsets.UTF_8))) { for (String line : lines) { writer.write(line); writer.write('\n'); } } } public void writeContentsToPath( String contents, Path pathRelativeToProjectRoot, FileAttribute<?>... attrs) throws IOException { writeBytesToPath(contents.getBytes(Charsets.UTF_8), pathRelativeToProjectRoot, attrs); } public void writeBytesToPath( byte[] bytes, Path pathRelativeToProjectRoot, FileAttribute<?>... attrs) throws IOException { // No need to buffer writes when writing a single piece of data. 
try (OutputStream outputStream =
    newUnbufferedFileOutputStream(pathRelativeToProjectRoot, /* append */ false, attrs)) {
  outputStream.write(bytes);
}
}

public OutputStream newFileOutputStream(Path pathRelativeToProjectRoot, FileAttribute<?>... attrs)
    throws IOException {
  return newFileOutputStream(pathRelativeToProjectRoot, /* append */ false, attrs);
}

public OutputStream newFileOutputStream(
    Path pathRelativeToProjectRoot, boolean append, FileAttribute<?>... attrs) throws IOException {
  return new BufferedOutputStream(
      newUnbufferedFileOutputStream(pathRelativeToProjectRoot, append, attrs));
}

public OutputStream newUnbufferedFileOutputStream(
    Path pathRelativeToProjectRoot, boolean append, FileAttribute<?>... attrs) throws IOException {
  return Channels.newOutputStream(
      Files.newByteChannel(
          getPathForRelativePath(pathRelativeToProjectRoot),
          append
              ? ImmutableSet.of(StandardOpenOption.CREATE, StandardOpenOption.APPEND)
              : ImmutableSet.of(
                  StandardOpenOption.CREATE,
                  StandardOpenOption.TRUNCATE_EXISTING,
                  StandardOpenOption.WRITE),
          attrs));
}

public <A extends BasicFileAttributes> A readAttributes(
    Path pathRelativeToProjectRoot, Class<A> type, LinkOption... options) throws IOException {
  return Files.readAttributes(getPathForRelativePath(pathRelativeToProjectRoot), type, options);
}

public InputStream newFileInputStream(Path pathRelativeToProjectRoot) throws IOException {
  return new BufferedInputStream(
      Files.newInputStream(getPathForRelativePath(pathRelativeToProjectRoot)));
}

/** @param inputStream Source of the bytes. This method does not close this stream. */
public void copyToPath(
    InputStream inputStream, Path pathRelativeToProjectRoot, CopyOption... options)
    throws IOException {
  Files.copy(inputStream, getPathForRelativePath(pathRelativeToProjectRoot), options);
}

/** Copies a file to an output stream. */
public void copyToOutputStream(Path pathRelativeToProjectRoot, OutputStream out)
    throws IOException {
  Files.copy(getPathForRelativePath(pathRelativeToProjectRoot), out);
}

public Optional<String> readFileIfItExists(Path pathRelativeToProjectRoot) {
  Path fileToRead = getPathForRelativePath(pathRelativeToProjectRoot);
  return readFileIfItExists(fileToRead, pathRelativeToProjectRoot.toString());
}

private Optional<String> readFileIfItExists(Path fileToRead, String pathRelativeToProjectRoot) {
  if (Files.isRegularFile(fileToRead)) {
    String contents;
    try {
      contents = new String(Files.readAllBytes(fileToRead), Charsets.UTF_8);
    } catch (IOException e) {
      // Alternatively, we could return Optional.empty(), though something seems suspicious if we
      // have already verified that fileToRead is a file and then we cannot read it.
      throw new RuntimeException("Error reading " + pathRelativeToProjectRoot, e);
    }
    return Optional.of(contents);
  } else {
    return Optional.empty();
  }
}

/**
 * Attempts to read the first line of the file specified by the relative path. If the file does
 * not exist, is empty, or encounters an error while being read, {@link Optional#empty()} is
 * returned. Otherwise, an {@link Optional} with the first line of the file will be returned.
 *
 * <p>// @deprecated Prefer operating on {@code Path}s directly, replaced by {@link
 * #readFirstLine(java.nio.file.Path)}
 */
public Optional<String> readFirstLine(String pathRelativeToProjectRoot) {
  return readFirstLine(projectRoot.getFileSystem().getPath(pathRelativeToProjectRoot));
}

/**
 * Attempts to read the first line of the file specified by the relative path. If the file does
 * not exist, is empty, or encounters an error while being read, {@link Optional#empty()} is
 * returned. Otherwise, an {@link Optional} with the first line of the file will be returned.
 */
public Optional<String> readFirstLine(Path pathRelativeToProjectRoot) {
  Path file = getPathForRelativePath(pathRelativeToProjectRoot);
  return readFirstLineFromFile(file);
}

/**
 * Attempts to read the first line of the specified file. If the file does not exist, is empty, or
 * encounters an error while being read, {@link Optional#empty()} is returned. Otherwise, an
 * {@link Optional} with the first line of the file will be returned.
 */
public Optional<String> readFirstLineFromFile(Path file) {
  try {
    try (BufferedReader reader = Files.newBufferedReader(file, Charsets.UTF_8)) {
      return Optional.ofNullable(reader.readLine());
    }
  } catch (IOException e) {
    // Because the file is not even guaranteed to exist, swallow the IOException.
    return Optional.empty();
  }
}

public List<String> readLines(Path pathRelativeToProjectRoot) throws IOException {
  Path file = getPathForRelativePath(pathRelativeToProjectRoot);
  return Files.readAllLines(file, Charsets.UTF_8);
}

/**
 * // @deprecated Prefer operating on {@code Path}s directly, replaced by {@link
 * Files#newInputStream(java.nio.file.Path, java.nio.file.OpenOption...)}.
 */
public InputStream getInputStreamForRelativePath(Path path) throws IOException {
  Path file = getPathForRelativePath(path);
  return Files.newInputStream(file);
}

public Sha1HashCode computeSha1(Path pathRelativeToProjectRootOrJustAbsolute) throws IOException {
  return delegate.computeSha1(pathRelativeToProjectRootOrJustAbsolute);
}

public String computeSha256(Path pathRelativeToProjectRoot) throws IOException {
  Path fileToHash = getPathForRelativePath(pathRelativeToProjectRoot);
  return Hashing.sha256().hashBytes(Files.readAllBytes(fileToHash)).toString();
}

public void copy(Path source, Path target, CopySourceMode sourceMode) throws IOException {
  source = getPathForRelativePath(source);
  switch (sourceMode) {
    case FILE:
      Files.copy(resolve(source), resolve(target), StandardCopyOption.REPLACE_EXISTING);
      break;
    case DIRECTORY_CONTENTS_ONLY:
      MoreFiles.copyRecursively(resolve(source), resolve(target));
      break;
    case DIRECTORY_AND_CONTENTS:
      MoreFiles.copyRecursively(resolve(source), resolve(target.resolve(source.getFileName())));
      break;
  }
}

public void move(Path source, Path target, CopyOption...
options) throws IOException { Files.move(resolve(source), resolve(target), options); } public void copyFolder(Path source, Path target) throws IOException { copy(source, target, CopySourceMode.DIRECTORY_CONTENTS_ONLY); } public void copyFile(Path source, Path target) throws IOException { copy(source, target, CopySourceMode.FILE); } public void createSymLink(Path symLink, Path realFile, boolean force) throws IOException { symLink = resolve(symLink); if (force) { Files.deleteIfExists(symLink); } if (Platform.detect() == Platform.WINDOWS) { if (windowsSymlinks) { // Windows symlinks are not enabled by default, so symlinks on windows are created // only when they are explicitly enabled realFile = MorePaths.normalize(symLink.getParent().resolve(realFile)); WindowsFS.createSymbolicLink(symLink, realFile, isDirectory(realFile)); } else { // otherwise, creating hardlinks if (isDirectory(realFile)) { // Hardlinks are only for files - so, copying folders MoreFiles.copyRecursively(realFile, symLink); } else { realFile = MorePaths.normalize(symLink.getParent().resolve(realFile)); Files.createLink(symLink, realFile); } } } else { Files.createSymbolicLink(symLink, realFile); } } /** * Returns the set of POSIX file permissions, or the empty set if the underlying file system does * not support POSIX file attributes. */ public Set<PosixFilePermission> getPosixFilePermissions(Path path) throws IOException { Path resolvedPath = getPathForRelativePath(path); if (Files.getFileAttributeView(resolvedPath, PosixFileAttributeView.class) != null) { return Files.getPosixFilePermissions(resolvedPath); } else { return ImmutableSet.of(); } } /** Returns true if the file under {@code path} exists and is a symbolic link, false otherwise. */ public boolean isSymLink(Path path) { return delegate.isSymlink(path); } /** Returns the target of the specified symbolic link. */ public Path readSymLink(Path path) throws IOException { return Files.readSymbolicLink(getPathForRelativePath(path)); } /** * Takes a sequence of paths relative to the project root and writes a zip file to {@code out} * with the contents and structure that matches that of the specified paths. */ public void createZip(Collection<Path> pathsToIncludeInZip, Path out) throws IOException { try (CustomZipOutputStream zip = ZipOutputStreams.newOutputStream(out)) { for (Path path : pathsToIncludeInZip) { boolean isDirectory = isDirectory(path); CustomZipEntry entry = new CustomZipEntry(path, isDirectory); // We want deterministic ZIPs, so avoid mtimes. entry.setFakeTime(); entry.setExternalAttributes(getFileAttributesForZipEntry(path)); zip.putNextEntry(entry); if (!isDirectory) { try (InputStream input = newFileInputStream(path)) { ByteStreams.copy(input, zip); } } zip.closeEntry(); } } } public Manifest getJarManifest(Path path) throws IOException { Path absolutePath = getPathForRelativePath(path); try (JarFile jarFile = new JarFile(absolutePath.toFile())) { return jarFile.getManifest(); } } public long getFileAttributesForZipEntry(Path path) throws IOException { long mode = 0; // Support executable files. If we detect this file is executable, store this // information as 0100 in the field typically used in zip implementations for // POSIX file permissions. We'll use this information when unzipping. 
if (isExecutable(path)) { mode |= MorePosixFilePermissions.toMode(EnumSet.of(PosixFilePermission.OWNER_EXECUTE)); } if (isDirectory(path)) { mode |= MoreFiles.S_IFDIR; } else if (isFile(path)) { mode |= MoreFiles.S_IFREG; } // Propagate any additional permissions mode |= MorePosixFilePermissions.toMode(getPosixFilePermissions(path)); return mode << 16; } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof ProjectFilesystem)) { return false; } ProjectFilesystem that = (ProjectFilesystem) other; if (!Objects.equals(projectRoot, that.projectRoot)) { return false; } if (!Objects.equals(blackListedPaths, that.blackListedPaths)) { return false; } return true; } @Override public String toString() { return String.format( "%s (projectRoot=%s, hash(blackListedPaths)=%s)", super.toString(), projectRoot, blackListedPaths.hashCode()); } @Override public int hashCode() { return Objects.hash(projectRoot, blackListedPaths); } public BuckPaths getBuckPaths() { return buckPaths; } /** * @param path the path to check. * @return whether ignoredPaths contains path or any of its ancestors. */ public boolean isIgnored(Path path) { Preconditions.checkArgument(!path.isAbsolute()); for (PathOrGlobMatcher blackListedPath : blackListedPaths) { if (blackListedPath.matches(path)) { return true; } } return false; } /** * Returns a relative path whose parent directory is guaranteed to exist. The path will be under * {@code buck-out}, so it is safe to write to. */ public Path createTempFile(String prefix, String suffix, FileAttribute<?>... attrs) throws IOException { return createTempFile(tmpDir.get(), prefix, suffix, attrs); } /** * Prefer {@link #createTempFile(String, String, FileAttribute[])} so that temporary files are * guaranteed to be created under {@code buck-out}. This method will be deprecated once t12079608 * is resolved. */ public Path createTempFile( Path directory, String prefix, String suffix, FileAttribute<?>... attrs) throws IOException { Path tmp = Files.createTempFile(resolve(directory), prefix, suffix, attrs); return getPathRelativeToProjectRoot(tmp).orElse(tmp); } public void touch(Path fileToTouch) throws IOException { if (exists(fileToTouch)) { setLastModifiedTime(fileToTouch, FileTime.fromMillis(System.currentTimeMillis())); } else { createNewFile(fileToTouch); } } /** * Converts a path string (or sequence of strings) to a Path with the same VFS as this instance. * * @see FileSystem#getPath(String, String...) */ public Path getPath(String first, String... rest) { return getRootPath().getFileSystem().getPath(first, rest); } /** * FileTreeWalker is used to walk files similar to Files.walkFileTree. * * <p>It has two major differences from walkFileTree. 1. It ignores files and directories ignored * by this ProjectFilesystem. 2. The walk is in a deterministic order. * * <p>And it has two minor differences. 1. It doesn't accept a depth limit. 2. It doesn't handle * the presence of a security manager the same way. 
*/ private class FileTreeWalker { private final FileVisitor<Path> visitor; private final Path root; private final boolean followLinks; private ArrayDeque<DirWalkState> state; FileTreeWalker(Path root, Set<FileVisitOption> options, FileVisitor<Path> pathFileVisitor) { this.followLinks = options.contains(FileVisitOption.FOLLOW_LINKS); this.visitor = pathFileVisitor; this.root = root; this.state = new ArrayDeque<>(); } private ImmutableList<Path> getContents(Path root) throws IOException { try (DirectoryStream<Path> stream = Files.newDirectoryStream(root, input -> !isIgnored(relativize(input)))) { return FluentIterable.from(stream).toSortedList(Comparator.naturalOrder()); } } private class DirWalkState { final Path dir; final BasicFileAttributes attrs; final boolean isRootSentinel; UnmodifiableIterator<Path> iter; @Nullable IOException ioe = null; DirWalkState(Path directory, BasicFileAttributes attributes, boolean isRootSentinel) { this.dir = directory; this.attrs = attributes; if (isRootSentinel) { this.iter = ImmutableList.of(root).iterator(); } else { try { this.iter = getContents(directory).iterator(); } catch (IOException e) { this.iter = ImmutableList.<Path>of().iterator(); this.ioe = e; } } this.isRootSentinel = isRootSentinel; } } private void walk() throws IOException { state.add(new DirWalkState(root, getAttributes(root), true)); while (true) { FileVisitResult result; if (state.getLast().iter.hasNext()) { result = visitPath(state.getLast().iter.next()); } else { DirWalkState dirState = state.removeLast(); if (dirState.isRootSentinel) { return; } result = visitor.postVisitDirectory(dirState.dir, dirState.ioe); } Objects.requireNonNull(result, "FileVisitor returned a null FileVisitResult."); if (result == FileVisitResult.SKIP_SIBLINGS) { state.getLast().iter = ImmutableList.<Path>of().iterator(); } else if (result == FileVisitResult.TERMINATE) { return; } } } private FileVisitResult visitPath(Path p) throws IOException { BasicFileAttributes attrs; try { attrs = getAttributes(p); ensureNoLoops(p, attrs); } catch (IOException ioe) { return visitor.visitFileFailed(p, ioe); } if (attrs.isDirectory()) { FileVisitResult result = visitor.preVisitDirectory(p, attrs); if (result == FileVisitResult.CONTINUE) { state.add(new DirWalkState(p, attrs, false)); } return result; } else { return visitor.visitFile(p, attrs); } } private void ensureNoLoops(Path p, BasicFileAttributes attrs) throws FileSystemLoopException { if (!followLinks) { return; } if (!attrs.isDirectory()) { return; } if (willLoop(p, attrs)) { throw new FileSystemLoopException(p.toString()); } } private boolean willLoop(Path p, BasicFileAttributes attrs) { try { Object thisKey = attrs.fileKey(); for (DirWalkState s : state) { if (s.isRootSentinel) { continue; } Object thatKey = s.attrs.fileKey(); if (thisKey != null && thatKey != null) { if (thisKey.equals(thatKey)) { return true; } } else if (Files.isSameFile(p, s.dir)) { return true; } } } catch (IOException e) { return true; } return false; } private BasicFileAttributes getAttributes(Path root) throws IOException { if (!followLinks) { return Files.readAttributes(root, BasicFileAttributes.class, LinkOption.NOFOLLOW_LINKS); } try { return Files.readAttributes(root, BasicFileAttributes.class); } catch (IOException e) { return Files.readAttributes(root, BasicFileAttributes.class, LinkOption.NOFOLLOW_LINKS); } } } }
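/*
 * A minimal usage sketch, not part of the Buck sources above: it illustrates the
 * root-relative contract of ProjectFilesystem -- relative paths handed to the API are
 * resolved against getRootPath(), parent directories can be created before writing, and
 * reads go through the same resolution. The class name and the idea of receiving an
 * already constructed ProjectFilesystem are assumptions for illustration; the real
 * constructor is private and instances come from Buck's factories.
 */
class ProjectFilesystemUsageSketch {
  static void demo(ProjectFilesystem filesystem) throws java.io.IOException {
    // Build a path with the same VFS as the project root.
    java.nio.file.Path config = filesystem.getPath("config", "app.properties");
    // Ensure the parent directory exists before writing.
    filesystem.createParentDirs(config);
    // Each line is written with a trailing newline.
    filesystem.writeLinesToPath(java.util.Arrays.asList("mode=debug"), config);
    // Reads resolve against the project root in the same way.
    java.util.Optional<String> firstLine = filesystem.readFirstLine(config);
    firstLine.ifPresent(System.out::println); // prints "mode=debug"
  }
}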
/* * Copyright 2013 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.openntf.domino.design.impl; import java.util.HashSet; import java.util.Set; import java.util.logging.Logger; import org.openntf.domino.utils.DominoUtils; import org.openntf.domino.utils.TypeUtils; import org.openntf.domino.utils.xml.XMLNode; import com.ibm.commons.util.StringUtil; /** * @author jgallagher * */ public class FormField implements org.openntf.domino.design.FormField { @SuppressWarnings("unused") private static final Logger log_ = Logger.getLogger(FormField.class.getName()); private final XMLNode node_; public FormField(final XMLNode node) { node_ = node; } @Override public Kind getKind() { return Kind.valueOf(node_.getAttribute("kind").toUpperCase()); } @Override public void setKind(final Kind kind) { node_.setAttribute("kind", kind.toString().toLowerCase()); } @Override public String getName() { return node_.getAttribute("name"); } @Override public void setName(final String name) { node_.setAttribute("name", name); } @Override public boolean isAllowMultiValues() { return node_.getAttribute("allowmultivalues").equals("true"); } @Override public void setAllowMultiValues(final boolean allowMultiValues) { node_.setAttribute("allowmultivalues", String.valueOf(allowMultiValues)); } @Override public boolean isProtected() { return node_.getAttribute("protected").equals("true"); } @Override public void setProtected(final boolean _protected) { node_.setAttribute("protected", String.valueOf(_protected)); } @Override public boolean isSign() { return node_.getAttribute("sign").equals("true"); } @Override public void setSign(final boolean sign) { node_.setAttribute("sign", String.valueOf(sign)); } @Override public boolean isSeal() { return node_.getAttribute("seal").equals("true"); } @Override public void setSeal(final boolean seal) { node_.setAttribute("seal", String.valueOf(seal)); } @Override public boolean isLookUpAddressOnRefresh() { return node_.getAttribute("lookupaddressonrefresh").equals("true"); } @Override public void setLookUpAddressOnRefresh(final boolean lookUpAddressOnRefresh) { node_.setAttribute("lookupaddressonrefresh", String.valueOf(lookUpAddressOnRefresh)); } @Override public boolean isLookUpEachChar() { return node_.getAttribute("lookupeachchar").equals("true"); } @Override public void setLookUpEachChar(final boolean lookUpEachChar) { node_.setAttribute("lookupeachchar", String.valueOf(lookUpEachChar)); } @Override public String getDefaultValueFormula() { XMLNode node = this.getDefaultValueFormulaNode(); if (node != null) { return node.getText(); } return ""; } @Override public void setDefaultValueFormula(final String defaultValueFormula) { // DXL is not happy with empty default value nodes, so delete when empty XMLNode node = this.getDefaultValueFormulaNode(); if (defaultValueFormula == null || defaultValueFormula.length() == 0) { if (node != null) { node_.removeChild(node.getParentNode()); } } else { if (node == null) { if (defaultValueFormula == null || defaultValueFormula.length() == 0) { return; 
      }
      node = this.createDefaultValueFormulaNode();
    }
    node.setText(defaultValueFormula);
  }
}

// DXL uses the "keyword" field type for several field types, so it's more convenient to expose a
// faux attribute that refers to the field type the way a human would
@Override
public Type getFieldType() {
  String type = node_.getAttribute("type");
  if (type.equals("keyword")) {
    XMLNode keywords = this.getKeywordsNode();
    String ui = keywords.getAttribute("ui");
    if (ui.equals("checkbox")) {
      return Type.CHECKBOX;
    } else if (ui.equals("radiobutton")) {
      return Type.RADIOBUTTON;
    } else if (ui.equals("combobox")) {
      return Type.COMBOBOX;
    } else {
      return Type.DIALOGLIST;
    }
  } else {
    return Type.valueOf(type.toUpperCase());
  }
}

@Override
public void setFieldType(final Type fieldType) {
  try {
    switch (fieldType) {
    case CHECKBOX:
    case COMBOBOX:
    case DIALOGLIST:
    case RADIOBUTTON:
      node_.setAttribute("type", "keyword");
      XMLNode keywords = this.getKeywordsNode();
      keywords.setAttribute("ui", fieldType.toString().toLowerCase());
      // fieldType.name() is upper case ("DIALOGLIST"), so comparing it to "dialoglist" was always
      // false; compare against the enum constant directly
      keywords.setAttribute("helperbutton", String.valueOf(fieldType == Type.DIALOGLIST));
      if (keywords.getAttribute("columns").length() == 0) {
        keywords.setAttribute("columns", "1");
      }
      break;
    default:
      node_.setAttribute("type", fieldType.toString().toLowerCase());
      if (fieldType == Type.PASSWORD) {
        node_.setAttribute("seal", "true");
      } else if (fieldType == Type.RICHTEXTLITE) {
        if (getKind() == Kind.COMPUTEDFORDISPLAY || getKind() == Kind.COMPUTEDWHENCOMPOSED) {
          this.setKind(Kind.COMPUTED);
        }
        if (node_.getAttribute("onlyallow").isEmpty()) {
          node_.setAttribute("onlyallow",
              "picture sharedimage attachment view datepicker sharedapplet text object calendar inbox help clear graphic link");
        }
        if (node_.getAttribute("firstdisplay").isEmpty()) {
          node_.setAttribute("firstdisplay", "text");
        }
      } else if (fieldType == Type.RICHTEXT && (getKind() == Kind.COMPUTEDFORDISPLAY || getKind() == Kind.COMPUTEDWHENCOMPOSED)) {
        setKind(Kind.COMPUTED);
      }
      break;
    }
  } catch (Exception e) {
    DominoUtils.handleException(e);
  }
}

@Override
public RTLType getFirstDisplay() {
  String firstDisplay = node_.getAttribute("firstdisplay");
  if (!StringUtil.isEmpty(firstDisplay)) {
    return RTLType.valueOf(firstDisplay.toUpperCase());
  }
  return null;
}

@Override
public void setFirstDisplay(final RTLType firstDisplay) {
  if (firstDisplay != null) {
    node_.setAttribute("firstdisplay", firstDisplay.toString().toLowerCase());
  } else {
    node_.setAttribute("firstdisplay", "");
  }
}

@Override
public Set<RTLType> getOnlyAllow() {
  String values = node_.getAttribute("onlyallow");
  Set<RTLType> result = new HashSet<RTLType>();
  for (String val : values.split("\\s")) {
    if (StringUtil.isNotEmpty(val)) {
      result.add(RTLType.valueOf(val.toUpperCase()));
    }
  }
  return result;
}

@Override
public void setOnlyAllow(final Set<RTLType> onlyAllow) {
  if (onlyAllow != null) {
    node_.setAttribute("onlyallow", TypeUtils.join(onlyAllow, " ").toLowerCase());
  } else {
    node_.setAttribute("onlyallow", "");
  }
}

@Override
public String getFieldHelp() {
  return node_.getAttribute("fieldhelp");
}

@Override
public void setFieldHelp(final String fieldHelp) {
  node_.setAttribute("fieldhelp", fieldHelp);
}

/* ******************************************************************************************
 * Internal utility methods
 ********************************************************************************************/

private XMLNode getKeywordsNode() {
  XMLNode node = node_.selectSingleNode("keywords");
  if (node == null) {
    node = node_.addChildElement("keywords");
  }
  return node;
}
private XMLNode getDefaultValueFormulaNode() { XMLNode node = node_.selectSingleNode("code[@event='defaultvalue']"); if (node == null) { return null; } else { node = node.selectSingleNode("formula"); } return node; } private XMLNode createDefaultValueFormulaNode() { XMLNode node = node_.addChildElement("code"); node.setAttribute("event", "defaultvalue"); node = node.addChildElement("formula"); return node; } }
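/*
 * A minimal usage sketch, not part of the sources above: it shows the faux "field type"
 * attribute in action. Setting a keyword-backed type such as CHECKBOX rewrites the
 * underlying DXL to type="keyword" with a matching ui attribute, and getFieldType()
 * maps that DXL back to the enum. How the XMLNode for a field is obtained is assumed
 * here; in practice it comes from a parsed form design element.
 */
class FormFieldUsageSketch {
  static void demo(final org.openntf.domino.utils.xml.XMLNode fieldNode) {
    final FormField field = new FormField(fieldNode);
    field.setName("Categories");
    field.setAllowMultiValues(true);
    // Stored in DXL as type="keyword" ui="checkbox"; the getter reverses the mapping.
    field.setFieldType(org.openntf.domino.design.FormField.Type.CHECKBOX);
    assert field.getFieldType() == org.openntf.domino.design.FormField.Type.CHECKBOX;
  }
}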
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package storm.trident.windowing; import java.io.Serializable; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Queue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import backtype.storm.Config; import backtype.storm.task.TopologyContext; import backtype.storm.tuple.Fields; import storm.trident.operation.Aggregator; import storm.trident.planner.ProcessorContext; import storm.trident.planner.TridentProcessor; import storm.trident.planner.processor.FreshCollector; import storm.trident.planner.processor.TridentContext; import storm.trident.spout.IBatchID; import storm.trident.tuple.ConsList; import storm.trident.tuple.TridentTuple; import storm.trident.tuple.TridentTupleView; import storm.trident.windowing.config.WindowConfig; /** * {@code TridentProcessor} implementation for windowing operations on trident stream. * */ public class WindowTridentProcessor implements TridentProcessor { private static final Logger LOG = LoggerFactory.getLogger(WindowTridentProcessor.class); public static final String TRIGGER_INPROCESS_PREFIX = "tip" + WindowsStore.KEY_SEPARATOR; public static final String TRIGGER_PREFIX = "tr" + WindowsStore.KEY_SEPARATOR; public static final String TRIGGER_COUNT_PREFIX = "tc" + WindowsStore.KEY_SEPARATOR; public static final String TRIGGER_FIELD_NAME = "_task_info"; public static final long DEFAULT_INMEMORY_TUPLE_CACHE_LIMIT = 100L; private final String windowId; private final Fields inputFields; private final Aggregator aggregator; private final boolean storeTuplesInStore; private String windowTriggerInprocessId; private WindowConfig windowConfig; private WindowsStoreFactory windowStoreFactory; private WindowsStore windowStore; private Map conf; private TopologyContext topologyContext; private FreshCollector collector; private TridentTupleView.ProjectionFactory projection; private TridentContext tridentContext; private ITridentWindowManager tridentWindowManager; private String windowTaskId; public WindowTridentProcessor(WindowConfig windowConfig, String uniqueWindowId, WindowsStoreFactory windowStoreFactory, Fields inputFields, Aggregator aggregator, boolean storeTuplesInStore) { this.windowConfig = windowConfig; this.windowId = uniqueWindowId; this.windowStoreFactory = windowStoreFactory; this.inputFields = inputFields; this.aggregator = aggregator; this.storeTuplesInStore = storeTuplesInStore; } @Override public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) { this.conf = conf; this.topologyContext = context; List<TridentTuple.Factory> parents = tridentContext.getParentTupleFactories(); if (parents.size() != 1) { throw new RuntimeException("Aggregation related operation can only have 
one parent"); } Long maxTuplesCacheSize = getWindowTuplesCacheSize(conf); this.tridentContext = tridentContext; collector = new FreshCollector(tridentContext); projection = new TridentTupleView.ProjectionFactory(parents.get(0), inputFields); windowStore = windowStoreFactory.create(); windowTaskId = windowId + WindowsStore.KEY_SEPARATOR + topologyContext.getThisTaskId() + WindowsStore.KEY_SEPARATOR; windowTriggerInprocessId = getWindowTriggerInprocessIdPrefix(windowTaskId); tridentWindowManager = storeTuplesInStore ? new StoreBasedTridentWindowManager(windowConfig, windowTaskId, windowStore, aggregator, tridentContext.getDelegateCollector(), maxTuplesCacheSize, inputFields) : new InMemoryTridentWindowManager(windowConfig, windowTaskId, windowStore, aggregator, tridentContext.getDelegateCollector()); tridentWindowManager.prepare(); } public static String getWindowTriggerInprocessIdPrefix(String windowTaskId) { return TRIGGER_INPROCESS_PREFIX + windowTaskId; } public static String getWindowTriggerTaskPrefix(String windowTaskId) { return TRIGGER_PREFIX + windowTaskId; } private Long getWindowTuplesCacheSize(Map conf) { if (conf.containsKey(Config.TOPOLOGY_TRIDENT_WINDOWING_INMEMORY_CACHE_LIMIT)) { return ((Number) conf.get(Config.TOPOLOGY_TRIDENT_WINDOWING_INMEMORY_CACHE_LIMIT)).longValue(); } return DEFAULT_INMEMORY_TUPLE_CACHE_LIMIT; } @Override public void cleanup() { LOG.info("shutting down window manager"); try { tridentWindowManager.shutdown(); } catch (Exception ex) { LOG.error("Error occurred while cleaning up window processor", ex); throw ex; } } @Override public void startBatch(ProcessorContext processorContext) { // initialize state for batch processorContext.state[tridentContext.getStateIndex()] = new ArrayList<TridentTuple>(); } @Override public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) { // add tuple to the batch state Object state = processorContext.state[tridentContext.getStateIndex()]; ((List<TridentTuple>) state).add(projection.create(tuple)); } @Override public void finishBatch(ProcessorContext processorContext) { Object batchId = processorContext.batchId; Object batchTxnId = getBatchTxnId(batchId); LOG.debug("Received finishBatch of : [{}] ", batchId); // get all the tuples in a batch and add it to trident-window-manager List<TridentTuple> tuples = (List<TridentTuple>) processorContext.state[tridentContext.getStateIndex()]; tridentWindowManager.addTuplesBatch(batchId, tuples); List<Integer> pendingTriggerIds = null; List<String> triggerKeys = new ArrayList<>(); Iterable<Object> triggerValues = null; if (retriedAttempt(batchId)) { pendingTriggerIds = (List<Integer>) windowStore.get(inprocessTriggerKey(batchTxnId)); for (Integer pendingTriggerId : pendingTriggerIds) { triggerKeys.add(triggerKey(pendingTriggerId)); } triggerValues = windowStore.get(triggerKeys); } // if there are no trigger values in earlier attempts or this is a new batch, emit pending triggers. 
if(triggerValues == null) { pendingTriggerIds = new ArrayList<>(); Queue<StoreBasedTridentWindowManager.TriggerResult> pendingTriggers = tridentWindowManager.getPendingTriggers(); LOG.debug("pending triggers at batch: [{}] and triggers.size: [{}] ", batchId, pendingTriggers.size()); try { Iterator<StoreBasedTridentWindowManager.TriggerResult> pendingTriggersIter = pendingTriggers.iterator(); List<Object> values = new ArrayList<>(); StoreBasedTridentWindowManager.TriggerResult triggerResult = null; while (pendingTriggersIter.hasNext()) { triggerResult = pendingTriggersIter.next(); for (List<Object> aggregatedResult : triggerResult.result) { String triggerKey = triggerKey(triggerResult.id); triggerKeys.add(triggerKey); values.add(aggregatedResult); pendingTriggerIds.add(triggerResult.id); } pendingTriggersIter.remove(); } triggerValues = values; } finally { // store inprocess triggers of a batch in store for batch retries for any failures if (!pendingTriggerIds.isEmpty()) { windowStore.put(inprocessTriggerKey(batchTxnId), pendingTriggerIds); } } } collector.setContext(processorContext); int i = 0; for (Object resultValue : triggerValues) { collector.emit(new ConsList(new TriggerInfo(windowTaskId, pendingTriggerIds.get(i++)), (List<Object>) resultValue)); } collector.setContext(null); } private String inprocessTriggerKey(Object batchTxnId) { return windowTriggerInprocessId + batchTxnId; } public static Object getBatchTxnId(Object batchId) { if (batchId instanceof IBatchID) { return ((IBatchID) batchId).getId(); } return null; } static boolean retriedAttempt(Object batchId) { if (batchId instanceof IBatchID) { return ((IBatchID) batchId).getAttemptId() > 0; } return false; } @Override public TridentTuple.Factory getOutputFactory() { return collector.getOutputFactory(); } public static class TriggerInfo implements Serializable { public final String windowTaskId; public final int triggerId; public TriggerInfo(String windowTaskId, int triggerId) { this.windowTaskId = windowTaskId; this.triggerId = triggerId; } public String generateTriggerKey() { return generateWindowTriggerKey(windowTaskId, triggerId); } @Override public String toString() { return "TriggerInfo{" + "windowTaskId='" + windowTaskId + '\'' + ", triggerId=" + triggerId + '}'; } } public String triggerKey(int triggerId) { return generateWindowTriggerKey(windowTaskId, triggerId); } public static String generateWindowTriggerKey(String windowTaskId, int triggerId) { return TRIGGER_PREFIX + windowTaskId + triggerId; } }
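/*
 * A minimal sketch, not part of the sources above: it illustrates the trigger key scheme.
 * A window task id is the window id and the task id, each followed by the store's key
 * separator; trigger keys prepend the "tr" prefix, and TriggerInfo regenerates the same
 * key so retried batches can look up previously stored trigger results. The literal ids
 * below, and placing this class in the storm.trident.windowing package so that
 * WindowsStore.KEY_SEPARATOR is visible, are assumptions for illustration.
 */
class WindowTriggerKeySketch {
  public static void main(String[] args) {
    String windowTaskId = "window-1" + WindowsStore.KEY_SEPARATOR + 7 + WindowsStore.KEY_SEPARATOR;
    String triggerKey = WindowTridentProcessor.generateWindowTriggerKey(windowTaskId, 3);
    WindowTridentProcessor.TriggerInfo info = new WindowTridentProcessor.TriggerInfo(windowTaskId, 3);
    // The key carried with an emitted result matches the key used in the windows store.
    assert triggerKey.equals(info.generateTriggerKey());
    System.out.println(triggerKey); // e.g. "tr|window-1|7|3" if the separator is "|"
  }
}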
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.provider.http5; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.UserAuthenticator; import org.apache.hc.client5.http.cookie.Cookie; /** * Configuration options builder utility for http5 provider. * * @since 2.5.0 */ public class Http5FileSystemConfigBuilder extends FileSystemConfigBuilder { private static final Http5FileSystemConfigBuilder BUILDER = new Http5FileSystemConfigBuilder(); /** * Defines the maximum number of connections allowed overall. This value only applies * to the number of connections from a particular instance of HTTP connection manager. * <p> * This parameter expects a value of type {@link Integer}. * </p> */ private static final String MAX_TOTAL_CONNECTIONS = "http.connection-manager.max-total"; /** * Defines the maximum number of connections allowed per host configuration. * These values only apply to the number of connections from a particular instance * of HTTP connection manager. */ private static final String MAX_HOST_CONNECTIONS = "http.connection-manager.max-per-host"; /** * Defines the connection timeout of an HTTP request. * <p> * This parameter expects a value of type {@link Integer}. * </p> */ private static final String CONNECTION_TIMEOUT = "http.connection.timeout"; /** * Defines the socket timeout of an HTTP request. * <p> * This parameter expects a value of type {@link Integer}. * </p> */ private static final String SO_TIMEOUT = "http.socket.timeout"; /** * Defines whether Keep-Alive option is used or not. * <p> * This parameter expects a value of type {@link Boolean}. * </p> */ private static final String KEEP_ALIVE = "http.keepAlive"; /** * Defines the keystore file path for SSL connections. * <p> * This parameter expects a value of type {@link String}. * </p> */ private static final String KEYSTORE_FILE = "http.keystoreFile"; /** * Defines the keystore pass phrase for SSL connections. * <p> * This parameter expects a value of type {@link String}. * </p> */ private static final String KEYSTORE_PASS = "http.keystorePass"; /** * Defines whether the host name should be verified or not in SSL connections. * <p> * This parameter expects a value of type {@link Boolean}. * </p> */ private static final String HOSTNAME_VERIFICATION_ENABLED = "http.hostname-verification.enabled"; /** * Defines whether the HttpClient should follow redirections from the responses. * <p> * This parameter expects a value of type {@link Boolean}. * </p> */ protected static final String KEY_FOLLOW_REDIRECT = "followRedirect"; /** * Defines the User-Agent request header string of the underlying HttpClient. 
 * <p>
 * This parameter expects a value of type {@link String}.
 * </p>
 */
private static final String KEY_USER_AGENT = "userAgent";

/**
 * Defines whether the preemptive authentication should be enabled or not.
 * <p>
 * This parameter expects a value of type {@link Boolean}.
 * </p>
 */
private static final String KEY_PREEMPTIVE_AUTHENTICATION = "preemptiveAuth";

/**
 * Defines the enabled TLS versions for the underlying HttpClient.
 * <p>
 * This parameter expects a value of type {@link String} as a comma separated string, each token of
 * which is the name of <code>org.apache.hc.core5.http.ssl.TLS</code> enum. e.g, "V_1_2, V_1_3".
 * </p>
 */
private static final String KEY_TLS_VERSIONS = "tlsVersions";

/**
 * The default value for {@link #MAX_TOTAL_CONNECTIONS} configuration.
 */
private static final int DEFAULT_MAX_CONNECTIONS = 50;

/**
 * The default value for {@link #MAX_HOST_CONNECTIONS} configuration.
 */
private static final int DEFAULT_MAX_HOST_CONNECTIONS = 5;

/**
 * The default value for {@link #CONNECTION_TIMEOUT} configuration.
 */
private static final int DEFAULT_CONNECTION_TIMEOUT = 0;

/**
 * The default value for {@link #SO_TIMEOUT} configuration.
 */
private static final int DEFAULT_SO_TIMEOUT = 0;

/**
 * The default value for {@link #KEEP_ALIVE} configuration.
 */
private static final boolean DEFAULT_KEEP_ALIVE = true;

/**
 * The default value for {@link #KEY_FOLLOW_REDIRECT} configuration.
 */
private static final boolean DEFAULT_FOLLOW_REDIRECT = true;

/**
 * The default value for {@link #KEY_USER_AGENT} configuration.
 */
private static final String DEFAULT_USER_AGENT = "Jakarta-Commons-VFS";

/**
 * The default value for {@link #HOSTNAME_VERIFICATION_ENABLED} configuration.
 */
private static final boolean DEFAULT_HOSTNAME_VERIFICATION_ENABLED = true;

/**
 * The default value for {@link #KEY_TLS_VERSIONS} configuration as a comma separated string, each token of
 * which is the name of <code>org.apache.hc.core5.http.ssl.TLS</code> enum. e.g, "V_1_2, V_1_3".
 */
private static final String DEFAULT_TLS_VERSIONS = "V_1_2";

/**
 * Constructs an <code>Http5FileSystemConfigBuilder</code>.
 *
 * @param prefix String for properties of this file system.
 */
protected Http5FileSystemConfigBuilder(final String prefix) {
    super(prefix);
}

private Http5FileSystemConfigBuilder() {
    super("http.");
}

/**
 * Gets the singleton builder.
 *
 * @return the singleton builder.
 */
public static Http5FileSystemConfigBuilder getInstance() {
    return BUILDER;
}

/**
 * Sets the charset used for url encoding.
 *
 * @param opts The FileSystem options.
 * @param charset the charset
 */
public void setUrlCharset(final FileSystemOptions opts, final String charset) {
    setParam(opts, "urlCharset", charset);
}

/**
 * Gets the charset used for url encoding.
 *
 * @param opts The FileSystem options.
 * @return the charset
 */
public String getUrlCharset(final FileSystemOptions opts) {
    return getString(opts, "urlCharset");
}

/**
 * Sets the proxy to use for http connection.
 * <p>
 * You have to set the ProxyPort too if you would like to have the proxy really used.
 * </p>
 *
 * @param opts The FileSystem options.
 * @param proxyHost the host
 * @see #setProxyPort
 */
public void setProxyHost(final FileSystemOptions opts, final String proxyHost) {
    setParam(opts, "proxyHost", proxyHost);
}

/**
 * Sets the proxy-port to use for http connection. You have to set the ProxyHost too if you would like to have the
 * proxy really used.
 *
 * @param opts The FileSystem options.
 * @param proxyPort the port
 * @see #setProxyHost
 */
public void setProxyPort(final FileSystemOptions opts, final int proxyPort) {
    setParam(opts, "proxyPort", Integer.valueOf(proxyPort));
}

/**
 * Gets the proxy to use for http connection. You have to set the ProxyPort too if you would like to have the proxy
 * really used.
 *
 * @param opts The FileSystem options.
 * @return proxyHost
 * @see #setProxyPort
 */
public String getProxyHost(final FileSystemOptions opts) {
    return getString(opts, "proxyHost");
}

/**
 * Gets the proxy-port to use for the http connection. You have to set the ProxyHost too if you would like to have
 * the proxy really used.
 *
 * @param opts The FileSystem options.
 * @return proxyPort: the port number or 0 if it is not set
 * @see #setProxyHost
 */
public int getProxyPort(final FileSystemOptions opts) {
    return getInteger(opts, "proxyPort", 0);
}

/**
 * Sets the proxy authenticator where the system should get the credentials from.
 *
 * @param opts The FileSystem options.
 * @param authenticator The UserAuthenticator.
 */
public void setProxyAuthenticator(final FileSystemOptions opts, final UserAuthenticator authenticator) {
    setParam(opts, "proxyAuthenticator", authenticator);
}

/**
 * Gets the proxy authenticator where the system should get the credentials from.
 *
 * @param opts The FileSystem options.
 * @return The UserAuthenticator.
 */
public UserAuthenticator getProxyAuthenticator(final FileSystemOptions opts) {
    return (UserAuthenticator) getParam(opts, "proxyAuthenticator");
}

/**
 * Sets the cookies to add to the request.
 *
 * @param opts The FileSystem options.
 * @param cookies An array of Cookies.
 */
public void setCookies(final FileSystemOptions opts, final Cookie[] cookies) {
    setParam(opts, "cookies", cookies);
}

/**
 * Sets whether to follow redirects for the connection.
 *
 * @param opts The FileSystem options.
 * @param redirect {@code true} to follow redirects, {@code false} not to.
 * @see #setFollowRedirect
 */
public void setFollowRedirect(final FileSystemOptions opts, final boolean redirect) {
    setParam(opts, KEY_FOLLOW_REDIRECT, redirect);
}

/**
 * Gets the cookies to add to the request.
 *
 * @param opts The FileSystem options.
 * @return the Cookie array.
 */
public Cookie[] getCookies(final FileSystemOptions opts) {
    return (Cookie[]) getParam(opts, "cookies");
}

/**
 * Gets whether to follow redirects for the connection.
 *
 * @param opts The FileSystem options.
 * @return {@code true} to follow redirects, {@code false} not to.
 * @see #setFollowRedirect
 */
public boolean getFollowRedirect(final FileSystemOptions opts) {
    return getBoolean(opts, KEY_FOLLOW_REDIRECT, DEFAULT_FOLLOW_REDIRECT);
}

/**
 * Sets the maximum number of connections allowed.
 *
 * @param opts The FileSystem options.
 * @param maxTotalConnections The maximum number of connections.
 */
public void setMaxTotalConnections(final FileSystemOptions opts, final int maxTotalConnections) {
    setParam(opts, MAX_TOTAL_CONNECTIONS, Integer.valueOf(maxTotalConnections));
}

/**
 * Gets the maximum number of connections allowed.
 *
 * @param opts The FileSystemOptions.
 * @return The maximum number of connections allowed.
 */
public int getMaxTotalConnections(final FileSystemOptions opts) {
    return getInteger(opts, MAX_TOTAL_CONNECTIONS, DEFAULT_MAX_CONNECTIONS);
}

/**
 * Sets the maximum number of connections allowed to any host.
 *
 * @param opts The FileSystem options.
 * @param maxHostConnections The maximum number of connections to a host.
 */
public void setMaxConnectionsPerHost(final FileSystemOptions opts, final int maxHostConnections) {
    setParam(opts, MAX_HOST_CONNECTIONS, Integer.valueOf(maxHostConnections));
}

/**
 * Gets the maximum number of connections allowed per host.
 *
 * @param opts The FileSystemOptions.
 * @return The maximum number of connections allowed per host.
 */
public int getMaxConnectionsPerHost(final FileSystemOptions opts) {
    return getInteger(opts, MAX_HOST_CONNECTIONS, DEFAULT_MAX_HOST_CONNECTIONS);
}

/**
 * Determines if the FileSystemOptions indicate that preemptive authentication is requested.
 *
 * @param opts The FileSystemOptions.
 * @return true if preemptiveAuth is requested.
 */
public boolean isPreemptiveAuth(final FileSystemOptions opts) {
    return getBoolean(opts, KEY_PREEMPTIVE_AUTHENTICATION, Boolean.FALSE).booleanValue();
}

/**
 * Sets the given value for preemptive HTTP authentication (using BASIC) on the given FileSystemOptions object.
 * Defaults to false if not set. It may be appropriate to set to true in cases when the resulting chattiness of the
 * conversation outweighs any architectural desire to use a stronger authentication scheme than basic/preemptive.
 *
 * @param opts The FileSystemOptions.
 * @param preemptiveAuth the desired setting; true=enabled and false=disabled.
 */
public void setPreemptiveAuth(final FileSystemOptions opts, final boolean preemptiveAuth) {
    setParam(opts, KEY_PREEMPTIVE_AUTHENTICATION, Boolean.valueOf(preemptiveAuth));
}

/**
 * Sets the connection timeout.
 *
 * @param opts The FileSystem options.
 * @param connectionTimeout The connection timeout.
 */
public void setConnectionTimeout(final FileSystemOptions opts, final int connectionTimeout) {
    setParam(opts, CONNECTION_TIMEOUT, Integer.valueOf(connectionTimeout));
}

/**
 * Gets the connection timeout.
 *
 * @param opts The FileSystem options.
 * @return The connection timeout.
 */
public int getConnectionTimeout(final FileSystemOptions opts) {
    return getInteger(opts, CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT);
}

/**
 * Sets the socket timeout.
 *
 * @param opts The FileSystem options.
 * @param soTimeout socket timeout.
 */
public void setSoTimeout(final FileSystemOptions opts, final int soTimeout) {
    setParam(opts, SO_TIMEOUT, Integer.valueOf(soTimeout));
}

/**
 * Gets the socket timeout.
 *
 * @param opts The FileSystemOptions.
 * @return The socket timeout.
 */
public int getSoTimeout(final FileSystemOptions opts) {
    return getInteger(opts, SO_TIMEOUT, DEFAULT_SO_TIMEOUT);
}

/**
 * Sets whether the FileSystemOptions indicate that HTTP Keep-Alive is respected.
 *
 * @param opts The FileSystemOptions.
 * @param keepAlive whether the FileSystemOptions indicate that HTTP Keep-Alive is respected or not.
 */
public void setKeepAlive(final FileSystemOptions opts, final boolean keepAlive) {
    setParam(opts, KEEP_ALIVE, Boolean.valueOf(keepAlive));
}

/**
 * Determines if the FileSystemOptions indicate that HTTP Keep-Alive is respected.
 *
 * @param opts The FileSystemOptions.
 * @return true if the FileSystemOptions indicate that HTTP Keep-Alive is respected.
 */
public boolean isKeepAlive(final FileSystemOptions opts) {
    return getBoolean(opts, KEEP_ALIVE, DEFAULT_KEEP_ALIVE);
}

/**
 * Sets the user agent to attach to the outgoing http methods.
 *
 * @param opts the file system options to modify
 * @param userAgent User Agent String
 */
public void setUserAgent(final FileSystemOptions opts, final String userAgent) {
    setParam(opts, KEY_USER_AGENT, userAgent);
}

/**
 * Gets the user agent string.
 *
 * @param opts the file system options to modify
 * @return User provided User-Agent string, otherwise the default of: Jakarta-Commons-VFS
 */
public String getUserAgent(final FileSystemOptions opts) {
    final String userAgent = (String) getParam(opts, KEY_USER_AGENT);
    return userAgent != null ? userAgent : DEFAULT_USER_AGENT;
}

/**
 * Sets the keystore file path for SSL connections.
 *
 * @param opts the file system options to modify
 * @param keyStoreFile keystore file path
 */
public void setKeyStoreFile(final FileSystemOptions opts, final String keyStoreFile) {
    setParam(opts, KEYSTORE_FILE, keyStoreFile);
}

/**
 * Returns the keystore file path to be used in SSL connections.
 *
 * @param opts the file system options to modify
 * @return keystore file path to be used in SSL connections
 */
public String getKeyStoreFile(final FileSystemOptions opts) {
    return (String) getParam(opts, KEYSTORE_FILE);
}

/**
 * Sets the keystore pass phrase for SSL connections.
 *
 * @param opts the file system options to modify
 * @param keyStorePass keystore pass phrase for SSL connections
 */
public void setKeyStorePass(final FileSystemOptions opts, final String keyStorePass) {
    setParam(opts, KEYSTORE_PASS, keyStorePass);
}

/**
 * Returns the keystore pass phrase for SSL connections.
 *
 * @param opts the file system options to modify
 * @return keystore pass phrase for SSL connections
 */
String getKeyStorePass(final FileSystemOptions opts) {
    return (String) getParam(opts, KEYSTORE_PASS);
}

/**
 * Sets whether the hostname should be verified in SSL context.
 *
 * @param opts The FileSystemOptions.
 * @param hostnameVerificationEnabled whether hostname should be verified
 */
public void setHostnameVerificationEnabled(final FileSystemOptions opts, final boolean hostnameVerificationEnabled) {
    setParam(opts, HOSTNAME_VERIFICATION_ENABLED, Boolean.valueOf(hostnameVerificationEnabled));
}

/**
 * Determines if the hostname should be verified in SSL context.
 *
 * @param opts The FileSystemOptions.
 * @return true if the hostname should be verified in SSL context.
 */
public boolean isHostnameVerificationEnabled(final FileSystemOptions opts) {
    return getBoolean(opts, HOSTNAME_VERIFICATION_ENABLED, DEFAULT_HOSTNAME_VERIFICATION_ENABLED);
}

/**
 * Sets the enabled TLS versions as a comma separated string, each token of which is the name of
 * <code>org.apache.hc.core5.http.ssl.TLS</code> enum. e.g, "V_1_2, V_1_3".
 *
 * @param opts the file system options to modify
 * @param tlsVersions enabled TLS versions
 */
public void setTlsVersions(final FileSystemOptions opts, final String tlsVersions) {
    setParam(opts, KEY_TLS_VERSIONS, tlsVersions);
}

/**
 * Gets the enabled TLS versions as a comma separated string, each token of which is the name of
 * <code>org.apache.hc.core5.http.ssl.TLS</code> enum. e.g, "V_1_2, V_1_3".
 *
 * @param opts the file system options to modify
 * @return enabled TLS versions
 */
public String getTlsVersions(final FileSystemOptions opts) {
    final String tlsVersions = (String) getParam(opts, KEY_TLS_VERSIONS);
    return tlsVersions != null ? tlsVersions : DEFAULT_TLS_VERSIONS;
}

@Override
protected Class<?
extends FileSystem> getConfigClass() { return Http5FileSystem.class; } }
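/*
 * Illustrative usage sketch (not part of the original sources): configuring the builder above
 * and resolving a file through Commons VFS. It assumes the conventional FileSystemConfigBuilder
 * singleton accessor getInstance(), the provider's usual package, and the "http5" URI scheme;
 * none of these appear in the excerpt above, so treat them as assumptions.
 */
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.VFS;
import org.apache.commons.vfs2.provider.http5.Http5FileSystemConfigBuilder; // assumed package

public class Http5ConfigExample {
    public static void main(final String[] args) throws FileSystemException {
        final Http5FileSystemConfigBuilder builder = Http5FileSystemConfigBuilder.getInstance();
        final FileSystemOptions opts = new FileSystemOptions();
        builder.setConnectionTimeout(opts, 5000);     // give up connecting after 5 seconds
        builder.setSoTimeout(opts, 10000);            // give up on stalled reads after 10 seconds
        builder.setMaxConnectionsPerHost(opts, 4);
        builder.setPreemptiveAuth(opts, true);        // send BASIC credentials with the first request
        builder.setTlsVersions(opts, "V_1_2, V_1_3"); // names from org.apache.hc.core5.http.ssl.TLS
        // The options are bound to the file system when the URI is resolved.
        final FileObject file = VFS.getManager().resolveFile("http5://example.com/data.txt", opts);
        System.out.println(file.exists());
    }
}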
/** * Copyright (C) 2016 - present McLeod Moores Software Limited. All rights reserved. */ package com.mcleodmoores.starling.client.portfolio.fpml5_8; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Set; import org.joda.beans.Bean; import org.joda.beans.BeanDefinition; import org.joda.beans.ImmutableBean; import org.joda.beans.ImmutableValidator; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectFieldsBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import org.threeten.bp.LocalDate; import com.opengamma.util.money.Currency; /** * An object representing an FX leg defined by the value date and either one payment and an exchange rate, or two payments. * In the first case, the second payment is constructed using the exchange rate and first payment with the counterparties * reversed. * <p> * For this object to be valid, the following conditions must apply: * <ul> * <li> If set, the counterparties of the two payments must match, i.e. the payer of the first exchange must be the * receiver of the second exchange and vice versa. * <li> If the exchange rate is set, the currencies of the payments must match those of the exchange rate. * <li> If both payments and the exchange rate are set, the exchange rate implied by the payments must match the supplied rate * to within 6 decimal places. * </ul> */ //TODO if value date is not set, use trade date + conventions to work out value date? @BeanDefinition public class FxSingleLeg implements ImmutableBean { /** * The first payment. */ @PropertyDefinition(validate = "notNull") private final ExchangedCurrency _exchangedCurrency1; /** * The second payment. If not set, this value is implied from the first payment and the exchange rate. */ @PropertyDefinition(get = "manual") private final ExchangedCurrency _exchangedCurrency2; /** * The value date of the trade. */ @PropertyDefinition(validate = "notNull") private final LocalDate _valueDate; /** * The exchange rate of the trade. */ @PropertyDefinition(get = "manual") private final ExchangeRate _exchangeRate; /** * The effective exchange rate of the trade. This might be calculated from the two payments. */ private ExchangeRate _effectiveExchangeRate; /** * The effective second payment. This might be calculated from the first payment and exchange rate. */ private ExchangedCurrency _effectiveExchangedCurrency2; /** * Validates this leg.
*/ @ImmutableValidator private void validate() { final BigDecimal paymentAmount1 = _exchangedCurrency1.getPaymentAmount().getAmount(); if (_exchangedCurrency2 != null) { _effectiveExchangedCurrency2 = _exchangedCurrency2; // check that the payer and receiver references on both legs match // using Objects.equals to handle null counterparties if (!Objects.equals(_exchangedCurrency1.getPayerPartyReference(), _exchangedCurrency2.getReceiverPartyReference())) { throw new IllegalStateException("Payer reference 1 " + _exchangedCurrency1.getPayerPartyReference() + " does not match receiver reference 2 " + _exchangedCurrency2.getReceiverPartyReference()); } if (!Objects.equals(_exchangedCurrency1.getReceiverPartyReference(), _exchangedCurrency2.getPayerPartyReference())) { throw new IllegalStateException("Payer reference 2 " + _exchangedCurrency2.getPayerPartyReference() + " does not match receiver reference 1 " + _exchangedCurrency1.getReceiverPartyReference()); } if (_exchangeRate != null) { // check for consistent currencies and exchange rates final BigDecimal paymentAmount2 = _exchangedCurrency2.getPaymentAmount().getAmount(); // check that the currencies are consistent final Currency paymentCurrency1 = _exchangedCurrency1.getPaymentAmount().getCurrency(); final Currency paymentCurrency2 = _exchangedCurrency2.getPaymentAmount().getCurrency(); final Currency exchangeRateCurrency1 = _exchangeRate.getQuotedCurrencyPair().getCurrency1(); final Currency exchangeRateCurrency2 = _exchangeRate.getQuotedCurrencyPair().getCurrency2(); if (!(paymentCurrency1.equals(exchangeRateCurrency1) && paymentCurrency2.equals(exchangeRateCurrency2) || paymentCurrency1.equals(exchangeRateCurrency2) && paymentCurrency2.equals(exchangeRateCurrency1))) { throw new IllegalStateException("Inconsistent exchanged currencies and exchange rate: have (" + paymentCurrency1 + ", " + paymentCurrency2 + ") and (" + exchangeRateCurrency1 + ", " + exchangeRateCurrency2 + ")"); } final BigDecimal impliedRate6dp, rate6dp; // get the exchange rate in CCY1/CCY2 form and round switch (_exchangeRate.getQuotedCurrencyPair().getQuoteBasis()) { case CURRENCY2_PER_CURRENCY1: rate6dp = _exchangeRate.getRate().setScale(6, RoundingMode.HALF_DOWN); break; case CURRENCY1_PER_CURRENCY2: rate6dp = BigDecimal.valueOf(1 / _exchangeRate.getRate().doubleValue()).setScale(6, RoundingMode.HALF_DOWN); break; default: throw new IllegalStateException("Unrecognized quote basis " + _exchangeRate.getQuotedCurrencyPair().getQuoteBasis()); } if (paymentCurrency1.equals(exchangeRateCurrency1)) { impliedRate6dp = BigDecimal.valueOf(paymentAmount2.doubleValue() / paymentAmount1.doubleValue()).setScale(6, RoundingMode.HALF_UP); } else { impliedRate6dp = BigDecimal.valueOf(paymentAmount1.doubleValue() / paymentAmount2.doubleValue()).setScale(6, RoundingMode.HALF_UP); } if (rate6dp.compareTo(impliedRate6dp) != 0) { throw new IllegalStateException("Implied rate " + impliedRate6dp + " does not match provided rate " + rate6dp + " for " + exchangeRateCurrency1 + "/" + exchangeRateCurrency2); } _effectiveExchangeRate = _exchangeRate; } else { // construct the exchange rate final BigDecimal paymentAmount2 = _exchangedCurrency2.getPaymentAmount().getAmount(); final BigDecimal impliedRate = BigDecimal.valueOf(paymentAmount2.doubleValue() / paymentAmount1.doubleValue()); final QuotedCurrencyPair impliedQuotedCurrencyPair = QuotedCurrencyPair.builder() .currency1(_exchangedCurrency1.getPaymentAmount().getCurrency()) .currency2(_exchangedCurrency2.getPaymentAmount().getCurrency())
.quoteBasis(QuoteBasis.CURRENCY2_PER_CURRENCY1) .build(); _effectiveExchangeRate = ExchangeRate.builder() .rate(impliedRate) .quotedCurrencyPair(impliedQuotedCurrencyPair) .build(); } } else if (_exchangeRate != null) { _effectiveExchangeRate = _exchangeRate; // construct the second payment final Currency exchangeRateCurrency1 = _exchangeRate.getQuotedCurrencyPair().getCurrency1(); final Currency exchangeRateCurrency2 = _exchangeRate.getQuotedCurrencyPair().getCurrency2(); final Currency paymentCurrency = _exchangedCurrency1.getPaymentAmount().getCurrency(); final PaymentAmount.Builder paymentBuilder = PaymentAmount.builder(); final double exchangeRate; // get rate as currency2 / currency1 switch (_exchangeRate.getQuotedCurrencyPair().getQuoteBasis()) { case CURRENCY1_PER_CURRENCY2: exchangeRate = 1 / _exchangeRate.getRate().doubleValue(); break; case CURRENCY2_PER_CURRENCY1: exchangeRate = _exchangeRate.getRate().doubleValue(); break; default: throw new IllegalStateException("Unrecognized quote basis " + _exchangeRate.getQuotedCurrencyPair().getQuoteBasis()); } final ExchangedCurrency.Builder exchangedCurrencyBuilder = ExchangedCurrency.builder() .payerPartyReference(_exchangedCurrency1.getReceiverPartyReference()) .receiverPartyReference(_exchangedCurrency1.getPayerPartyReference()); if (paymentCurrency.equals(exchangeRateCurrency1)) { paymentBuilder.currency(exchangeRateCurrency2) .amount(BigDecimal.valueOf(_exchangedCurrency1.getPaymentAmount().getAmount().doubleValue() * exchangeRate)); } else if (paymentCurrency.equals(exchangeRateCurrency2)) { paymentBuilder.currency(exchangeRateCurrency1) .amount(BigDecimal.valueOf(_exchangedCurrency1.getPaymentAmount().getAmount().doubleValue() / exchangeRate)); } else { throw new IllegalStateException("Exchange rate currencies (" + exchangeRateCurrency1 + ", " + exchangeRateCurrency2 + ") not compatible with first payment currency " + paymentCurrency); } _effectiveExchangedCurrency2 = exchangedCurrencyBuilder.paymentAmount(paymentBuilder.build()).build(); } else { throw new IllegalStateException("Must set either the exchangeRate or exchangedCurrency2 fields"); } } /** * Gets the second payment. If not set, this value is implied from the first payment and the exchange rate. * @return the value of the property */ public ExchangedCurrency getExchangedCurrency2() { return _effectiveExchangedCurrency2; } /** * Gets the exchange rate of the trade. * @return the value of the property */ public ExchangeRate getExchangeRate() { return _effectiveExchangeRate; } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code FxSingleLeg}. * @return the meta-bean, not null */ public static FxSingleLeg.Meta meta() { return FxSingleLeg.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(FxSingleLeg.Meta.INSTANCE); } /** * Returns a builder used to create an instance of the bean. * @return the builder, not null */ public static FxSingleLeg.Builder builder() { return new FxSingleLeg.Builder(); } /** * Restricted constructor.
* @param builder the builder to copy from, not null */ protected FxSingleLeg(FxSingleLeg.Builder builder) { JodaBeanUtils.notNull(builder._exchangedCurrency1, "exchangedCurrency1"); JodaBeanUtils.notNull(builder._valueDate, "valueDate"); this._exchangedCurrency1 = builder._exchangedCurrency1; this._exchangedCurrency2 = builder._exchangedCurrency2; this._valueDate = builder._valueDate; this._exchangeRate = builder._exchangeRate; validate(); } @Override public FxSingleLeg.Meta metaBean() { return FxSingleLeg.Meta.INSTANCE; } @Override public <R> Property<R> property(String propertyName) { return metaBean().<R>metaProperty(propertyName).createProperty(this); } @Override public Set<String> propertyNames() { return metaBean().metaPropertyMap().keySet(); } //----------------------------------------------------------------------- /** * Gets the first payment. * @return the value of the property, not null */ public ExchangedCurrency getExchangedCurrency1() { return _exchangedCurrency1; } //----------------------------------------------------------------------- /** * Gets the value date of the trade. * @return the value of the property, not null */ public LocalDate getValueDate() { return _valueDate; } //----------------------------------------------------------------------- /** * Returns a builder that allows this bean to be mutated. * @return the mutable builder, not null */ public Builder toBuilder() { return new Builder(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { FxSingleLeg other = (FxSingleLeg) obj; return JodaBeanUtils.equal(getExchangedCurrency1(), other.getExchangedCurrency1()) && JodaBeanUtils.equal(getExchangedCurrency2(), other.getExchangedCurrency2()) && JodaBeanUtils.equal(getValueDate(), other.getValueDate()) && JodaBeanUtils.equal(getExchangeRate(), other.getExchangeRate()); } return false; } @Override public int hashCode() { int hash = getClass().hashCode(); hash = hash * 31 + JodaBeanUtils.hashCode(getExchangedCurrency1()); hash = hash * 31 + JodaBeanUtils.hashCode(getExchangedCurrency2()); hash = hash * 31 + JodaBeanUtils.hashCode(getValueDate()); hash = hash * 31 + JodaBeanUtils.hashCode(getExchangeRate()); return hash; } @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("FxSingleLeg{"); int len = buf.length(); toString(buf); if (buf.length() > len) { buf.setLength(buf.length() - 2); } buf.append('}'); return buf.toString(); } protected void toString(StringBuilder buf) { buf.append("exchangedCurrency1").append('=').append(JodaBeanUtils.toString(getExchangedCurrency1())).append(',').append(' '); buf.append("exchangedCurrency2").append('=').append(JodaBeanUtils.toString(getExchangedCurrency2())).append(',').append(' '); buf.append("valueDate").append('=').append(JodaBeanUtils.toString(getValueDate())).append(',').append(' '); buf.append("exchangeRate").append('=').append(JodaBeanUtils.toString(getExchangeRate())).append(',').append(' '); } //----------------------------------------------------------------------- /** * The meta-bean for {@code FxSingleLeg}. */ public static class Meta extends DirectMetaBean { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code exchangedCurrency1} property. 
*/ private final MetaProperty<ExchangedCurrency> _exchangedCurrency1 = DirectMetaProperty.ofImmutable( this, "exchangedCurrency1", FxSingleLeg.class, ExchangedCurrency.class); /** * The meta-property for the {@code exchangedCurrency2} property. */ private final MetaProperty<ExchangedCurrency> _exchangedCurrency2 = DirectMetaProperty.ofImmutable( this, "exchangedCurrency2", FxSingleLeg.class, ExchangedCurrency.class); /** * The meta-property for the {@code valueDate} property. */ private final MetaProperty<LocalDate> _valueDate = DirectMetaProperty.ofImmutable( this, "valueDate", FxSingleLeg.class, LocalDate.class); /** * The meta-property for the {@code exchangeRate} property. */ private final MetaProperty<ExchangeRate> _exchangeRate = DirectMetaProperty.ofImmutable( this, "exchangeRate", FxSingleLeg.class, ExchangeRate.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, null, "exchangedCurrency1", "exchangedCurrency2", "valueDate", "exchangeRate"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 219734175: // exchangedCurrency1 return _exchangedCurrency1; case 219734176: // exchangedCurrency2 return _exchangedCurrency2; case -766192449: // valueDate return _valueDate; case 1429636515: // exchangeRate return _exchangeRate; } return super.metaPropertyGet(propertyName); } @Override public FxSingleLeg.Builder builder() { return new FxSingleLeg.Builder(); } @Override public Class<? extends FxSingleLeg> beanType() { return FxSingleLeg.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code exchangedCurrency1} property. * @return the meta-property, not null */ public final MetaProperty<ExchangedCurrency> exchangedCurrency1() { return _exchangedCurrency1; } /** * The meta-property for the {@code exchangedCurrency2} property. * @return the meta-property, not null */ public final MetaProperty<ExchangedCurrency> exchangedCurrency2() { return _exchangedCurrency2; } /** * The meta-property for the {@code valueDate} property. * @return the meta-property, not null */ public final MetaProperty<LocalDate> valueDate() { return _valueDate; } /** * The meta-property for the {@code exchangeRate} property. 
* @return the meta-property, not null */ public final MetaProperty<ExchangeRate> exchangeRate() { return _exchangeRate; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case 219734175: // exchangedCurrency1 return ((FxSingleLeg) bean).getExchangedCurrency1(); case 219734176: // exchangedCurrency2 return ((FxSingleLeg) bean).getExchangedCurrency2(); case -766192449: // valueDate return ((FxSingleLeg) bean).getValueDate(); case 1429636515: // exchangeRate return ((FxSingleLeg) bean).getExchangeRate(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { metaProperty(propertyName); if (quiet) { return; } throw new UnsupportedOperationException("Property cannot be written: " + propertyName); } } //----------------------------------------------------------------------- /** * The bean-builder for {@code FxSingleLeg}. */ public static class Builder extends DirectFieldsBeanBuilder<FxSingleLeg> { private ExchangedCurrency _exchangedCurrency1; private ExchangedCurrency _exchangedCurrency2; private LocalDate _valueDate; private ExchangeRate _exchangeRate; /** * Restricted constructor. */ protected Builder() { } /** * Restricted copy constructor. * @param beanToCopy the bean to copy from, not null */ protected Builder(FxSingleLeg beanToCopy) { this._exchangedCurrency1 = beanToCopy.getExchangedCurrency1(); this._exchangedCurrency2 = beanToCopy.getExchangedCurrency2(); this._valueDate = beanToCopy.getValueDate(); this._exchangeRate = beanToCopy.getExchangeRate(); } //----------------------------------------------------------------------- @Override public Object get(String propertyName) { switch (propertyName.hashCode()) { case 219734175: // exchangedCurrency1 return _exchangedCurrency1; case 219734176: // exchangedCurrency2 return _exchangedCurrency2; case -766192449: // valueDate return _valueDate; case 1429636515: // exchangeRate return _exchangeRate; default: throw new NoSuchElementException("Unknown property: " + propertyName); } } @Override public Builder set(String propertyName, Object newValue) { switch (propertyName.hashCode()) { case 219734175: // exchangedCurrency1 this._exchangedCurrency1 = (ExchangedCurrency) newValue; break; case 219734176: // exchangedCurrency2 this._exchangedCurrency2 = (ExchangedCurrency) newValue; break; case -766192449: // valueDate this._valueDate = (LocalDate) newValue; break; case 1429636515: // exchangeRate this._exchangeRate = (ExchangeRate) newValue; break; default: throw new NoSuchElementException("Unknown property: " + propertyName); } return this; } @Override public Builder set(MetaProperty<?> property, Object value) { super.set(property, value); return this; } @Override public Builder setString(String propertyName, String value) { setString(meta().metaProperty(propertyName), value); return this; } @Override public Builder setString(MetaProperty<?> property, String value) { super.setString(property, value); return this; } @Override public Builder setAll(Map<String, ? extends Object> propertyValueMap) { super.setAll(propertyValueMap); return this; } @Override public FxSingleLeg build() { return new FxSingleLeg(this); } //----------------------------------------------------------------------- /** * Sets the first payment. 
* @param exchangedCurrency1 the new value, not null * @return this, for chaining, not null */ public Builder exchangedCurrency1(ExchangedCurrency exchangedCurrency1) { JodaBeanUtils.notNull(exchangedCurrency1, "exchangedCurrency1"); this._exchangedCurrency1 = exchangedCurrency1; return this; } /** * Sets the second payment. If not set, this value is implied from the first payment and the exchange rate. * @param exchangedCurrency2 the new value * @return this, for chaining, not null */ public Builder exchangedCurrency2(ExchangedCurrency exchangedCurrency2) { this._exchangedCurrency2 = exchangedCurrency2; return this; } /** * Sets the value date of the trade. * @param valueDate the new value, not null * @return this, for chaining, not null */ public Builder valueDate(LocalDate valueDate) { JodaBeanUtils.notNull(valueDate, "valueDate"); this._valueDate = valueDate; return this; } /** * Sets the exchange rate of the trade. * @param exchangeRate the new value * @return this, for chaining, not null */ public Builder exchangeRate(ExchangeRate exchangeRate) { this._exchangeRate = exchangeRate; return this; } //----------------------------------------------------------------------- @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("FxSingleLeg.Builder{"); int len = buf.length(); toString(buf); if (buf.length() > len) { buf.setLength(buf.length() - 2); } buf.append('}'); return buf.toString(); } protected void toString(StringBuilder buf) { buf.append("exchangedCurrency1").append('=').append(JodaBeanUtils.toString(_exchangedCurrency1)).append(',').append(' '); buf.append("exchangedCurrency2").append('=').append(JodaBeanUtils.toString(_exchangedCurrency2)).append(',').append(' '); buf.append("valueDate").append('=').append(JodaBeanUtils.toString(_valueDate)).append(',').append(' '); buf.append("exchangeRate").append('=').append(JodaBeanUtils.toString(_exchangeRate)).append(',').append(' '); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
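/*
 * Illustrative construction sketch (not part of the original sources). It uses only builder
 * methods visible in this package (FxSingleLeg, ExchangedCurrency, PaymentAmount, ExchangeRate,
 * QuotedCurrencyPair, QuoteBasis); the party references are left unset, which validate()
 * tolerates via Objects.equals(). The amounts, rate and value date are invented for the example.
 */
import java.math.BigDecimal;

import org.threeten.bp.LocalDate;

import com.mcleodmoores.starling.client.portfolio.fpml5_8.ExchangeRate;
import com.mcleodmoores.starling.client.portfolio.fpml5_8.ExchangedCurrency;
import com.mcleodmoores.starling.client.portfolio.fpml5_8.FxSingleLeg;
import com.mcleodmoores.starling.client.portfolio.fpml5_8.PaymentAmount;
import com.mcleodmoores.starling.client.portfolio.fpml5_8.QuoteBasis;
import com.mcleodmoores.starling.client.portfolio.fpml5_8.QuotedCurrencyPair;
import com.opengamma.util.money.Currency;

public class FxSingleLegExample {
    public static void main(final String[] args) {
        // One payment plus an explicit rate: validate() implies the second payment
        // (USD, roughly EUR 1,000,000 * 1.10) with the counterparties reversed.
        final FxSingleLeg leg = FxSingleLeg.builder()
            .exchangedCurrency1(ExchangedCurrency.builder()
                .paymentAmount(PaymentAmount.builder()
                    .currency(Currency.EUR)
                    .amount(new BigDecimal("1000000"))
                    .build())
                .build())
            .exchangeRate(ExchangeRate.builder()
                .quotedCurrencyPair(QuotedCurrencyPair.builder()
                    .currency1(Currency.EUR)
                    .currency2(Currency.USD)
                    .quoteBasis(QuoteBasis.CURRENCY2_PER_CURRENCY1)
                    .build())
                .rate(new BigDecimal("1.10"))
                .build())
            .valueDate(LocalDate.of(2016, 6, 15))
            .build();
        // Prints the implied USD payment, approximately 1100000 (up to double rounding).
        System.out.println(leg.getExchangedCurrency2().getPaymentAmount().getAmount());
    }
}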
/* * Copyright 2000-2013 JetBrains s.r.o. * Copyright 2014-2015 AS3Boyan * Copyright 2014-2014 Elias Ku * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.plugins.haxe.model; import com.intellij.plugins.haxe.HaxeComponentType; import com.intellij.plugins.haxe.lang.psi.*; import com.intellij.plugins.haxe.model.type.HaxeTypeResolver; import com.intellij.plugins.haxe.model.type.ResultHolder; import com.intellij.plugins.haxe.model.type.SpecificHaxeClassReference; import com.intellij.plugins.haxe.model.type.SpecificTypeReference; import com.intellij.plugins.haxe.util.UsefulPsiTreeUtil; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiIdentifier; import org.apache.commons.lang.NotImplementedException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; public class HaxeClassModel { public HaxeClass haxeClass; public HaxeClassModel(HaxeClass haxeClass) { this.haxeClass = haxeClass; } public HaxeClassReferenceModel getParentClassReference() { List<HaxeType> list = haxeClass.getHaxeExtendsList(); if (list.size() == 0) return null; return new HaxeClassReferenceModel(list.get(0)); } static public boolean isValidClassName(String name) { return name.substring(0, 1).equals(name.substring(0, 1).toUpperCase()); } public HaxeClassModel getParentClass() { final HaxeClassReferenceModel reference = this.getParentClassReference(); return (reference != null) ? reference.getHaxeClass() : null; } public List<HaxeClassReferenceModel> getInterfaceExtendingInterfaces() { List<HaxeType> list = haxeClass.getHaxeExtendsList(); List<HaxeClassReferenceModel> out = new ArrayList<HaxeClassReferenceModel>(); for (HaxeType type : list) { out.add(new HaxeClassReferenceModel(type)); } return out; } public List<HaxeClassReferenceModel> getImplementingInterfaces() { List<HaxeType> list = haxeClass.getHaxeImplementsList(); List<HaxeClassReferenceModel> out = new ArrayList<HaxeClassReferenceModel>(); for (HaxeType type : list) { out.add(new HaxeClassReferenceModel(type)); } return out; } public boolean isExtern() { return haxeClass.isExtern(); } public boolean isClass() { return !this.isAbstract() && (HaxeComponentType.typeOf(haxeClass) == HaxeComponentType.CLASS); } public boolean isInterface() { return HaxeComponentType.typeOf(haxeClass) == HaxeComponentType.INTERFACE; } public boolean isEnum() { return HaxeComponentType.typeOf(haxeClass) == HaxeComponentType.ENUM; } public boolean isTypedef() { return HaxeComponentType.typeOf(haxeClass) == HaxeComponentType.TYPEDEF; } public boolean isAbstract() { return haxeClass instanceof HaxeAbstractClassDeclaration; } // @TODO: Create AbstractHaxeClassModel extending this class for these methods? 
// @TODO: this should be properly parsed in haxe.bnf so searching for the underlying type is not required @Nullable public HaxeTypeOrAnonymous getAbstractUnderlyingType() { if (!isAbstract()) return null; PsiElement[] children = getPsi().getChildren(); // FIX: support list of metas before ComponentName for(int i = 0; i < children.length; ++i) { PsiElement child = children[i]; if(child instanceof HaxeComponentName) { if(i + 1 < children.length) { child = children[i + 1]; if(child instanceof HaxeTypeOrAnonymous) { return (HaxeTypeOrAnonymous)child; } } break; } } return null; } // @TODO: this should be properly parsed in haxe.bnf so searching for to is not required public List<HaxeType> getAbstractToList() { if (!isAbstract()) return Collections.emptyList(); List<HaxeType> types = new LinkedList<HaxeType>(); for (HaxeIdentifier id : UsefulPsiTreeUtil.getChildren(haxeClass, HaxeIdentifier.class)) { if (id.getText().equals("to")) { PsiElement sibling = UsefulPsiTreeUtil.getNextSiblingNoSpaces(id); if (sibling instanceof HaxeType) { types.add((HaxeType)sibling); } } } return types; } // @TODO: this should be properly parsed in haxe.bnf so searching for from is not required public List<HaxeType> getAbstractFromList() { if (!isAbstract()) return Collections.emptyList(); List<HaxeType> types = new LinkedList<HaxeType>(); for (HaxeIdentifier id : UsefulPsiTreeUtil.getChildren(haxeClass, HaxeIdentifier.class)) { if (id.getText().equals("from")) { PsiElement sibling = UsefulPsiTreeUtil.getNextSiblingNoSpaces(id); if (sibling instanceof HaxeType) { types.add((HaxeType)sibling); } } } return types; } public boolean hasMethod(String name) { return getMethod(name) != null; } public boolean hasMethodSelf(String name) { HaxeMethodModel method = getMethod(name); if (method == null) return false; return (method.getDeclaringClass() == this); } public HaxeMethodModel getMethodSelf(String name) { HaxeMethodModel method = getMethod(name); if (method == null) return null; return (method.getDeclaringClass() == this) ? method : null; } public HaxeMethodModel getConstructorSelf() { return getMethodSelf("new"); } public HaxeMethodModel getConstructor() { return getMethod("new"); } public boolean hasConstructor() { return getConstructor() != null; } public HaxeMethodModel getParentConstructor() { HaxeClassReferenceModel parentClass = getParentClassReference(); if (parentClass == null) return null; return parentClass.getHaxeClass().getMethod("new"); } public HaxeMemberModel getMember(String name) { final HaxeMethodModel method = getMethod(name); final HaxeFieldModel field = getField(name); return (method != null) ? method : field; } public List<HaxeMemberModel> getMembers() { LinkedList<HaxeMemberModel> members = new LinkedList<HaxeMemberModel>(); for (HaxeMethodModel method : getMethods()) members.add(method); for (HaxeFieldModel field : getFields()) members.add(field); return members; } @NotNull public List<HaxeMemberModel> getMembersSelf() { LinkedList<HaxeMemberModel> members = new LinkedList<HaxeMemberModel>(); HaxeClassBody body = UsefulPsiTreeUtil.getChild(haxeClass, HaxeClassBody.class); if (body != null) { for (PsiElement element : body.getChildren()) { if (element instanceof HaxeMethod || element instanceof HaxeVarDeclaration) { HaxeMemberModel model = HaxeMemberModel.fromPsi(element); if (model != null) { members.add(model); } } } } return members; } public HaxeFieldModel getField(String name) { HaxeVarDeclaration name1 = (HaxeVarDeclaration)haxeClass.findHaxeFieldByName(name); return name1 != null ? 
new HaxeFieldModel(name1) : null; } public HaxeMethodModel getMethod(String name) { HaxeMethodPsiMixin name1 = (HaxeMethodPsiMixin)haxeClass.findHaxeMethodByName(name); return name1 != null ? name1.getModel() : null; } public List<HaxeMethodModel> getMethods() { List<HaxeMethodModel> models = new ArrayList<HaxeMethodModel>(); for (HaxeMethod method : haxeClass.getHaxeMethods()) { models.add(method.getModel()); } return models; } public List<HaxeMethodModel> getMethodsSelf() { List<HaxeMethodModel> models = new ArrayList<HaxeMethodModel>(); for (HaxeMethod method : haxeClass.getHaxeMethods()) { if (method.getContainingClass() == this.haxeClass) models.add(method.getModel()); } return models; } public List<HaxeMethodModel> getAncestorMethods() { List<HaxeMethodModel> models = new ArrayList<HaxeMethodModel>(); for (HaxeMethod method : haxeClass.getHaxeMethods()) { if (method.getContainingClass() != this.haxeClass) models.add(method.getModel()); } return models; } @NotNull public HaxeClass getPsi() { return haxeClass; } @Nullable public HaxeClassBody getBodyPsi() { return (haxeClass instanceof HaxeClassDeclaration) ? ((HaxeClassDeclaration)haxeClass).getClassBody() : null; } @Nullable public PsiIdentifier getNamePsi() { return haxeClass.getNameIdentifier(); } private HaxeDocumentModel _document = null; @NotNull public HaxeDocumentModel getDocument() { if (_document == null) _document = new HaxeDocumentModel(haxeClass); return _document; } public String getName() { return haxeClass.getName(); } public void addMethodsFromPrototype(List<HaxeMethodModel> methods) { throw new NotImplementedException("Not implemented HaxeClassMethod.addMethodsFromPrototype() : check HaxeImplementMethodHandler"); } public List<HaxeFieldModel> getFields() { HaxeClassBody body = UsefulPsiTreeUtil.getChild(haxeClass, HaxeClassBody.class); LinkedList<HaxeFieldModel> out = new LinkedList<HaxeFieldModel>(); if (body != null) { for (HaxeVarDeclaration declaration : UsefulPsiTreeUtil.getChildren(body, HaxeVarDeclaration.class)) { out.add(new HaxeFieldModel(declaration)); } } return out; } public List<HaxeFieldModel> getFieldsSelf() { HaxeClassBody body = UsefulPsiTreeUtil.getChild(haxeClass, HaxeClassBody.class); LinkedList<HaxeFieldModel> out = new LinkedList<HaxeFieldModel>(); if (body != null) { for (HaxeVarDeclaration declaration : UsefulPsiTreeUtil.getChildren(body, HaxeVarDeclaration.class)) { if (declaration.getContainingClass() == this.haxeClass) { out.add(new HaxeFieldModel(declaration)); } } } return out; } public Set<HaxeClassModel> getCompatibleTypes() { final Set<HaxeClassModel> output = new LinkedHashSet<HaxeClassModel>(); writeCompatibleTypes(output); return output; } public void writeCompatibleTypes(Set<HaxeClassModel> output) { // Own output.add(this); final HaxeClassModel parentClass = this.getParentClass(); // Parent classes if (parentClass != null) { if (!output.contains(parentClass)) { parentClass.writeCompatibleTypes(output); } } // Interfaces for (HaxeClassReferenceModel model : this.getImplementingInterfaces()) { if (model == null) continue; final HaxeClassModel aInterface = model.getHaxeClass(); if (aInterface == null) continue; if (!output.contains(aInterface)) { aInterface.writeCompatibleTypes(output); } } // @CHECK abstract FROM for (HaxeType type : getAbstractFromList()) { final ResultHolder aTypeRef = HaxeTypeResolver.getTypeFromType(type); SpecificHaxeClassReference classType = aTypeRef.getClassType(); if (classType != null) { classType.getHaxeClassModel().writeCompatibleTypes(output); } } // 
@CHECK abstract TO for (HaxeType type : getAbstractToList()) { final ResultHolder aTypeRef = HaxeTypeResolver.getTypeFromType(type); SpecificHaxeClassReference classType = aTypeRef.getClassType(); if (classType != null) { classType.getHaxeClassModel().writeCompatibleTypes(output); } } } public List<HaxeGenericParamModel> getGenericParams() { List<HaxeGenericParamModel> out = new LinkedList<HaxeGenericParamModel>(); if (getPsi().getGenericParam() != null) { int index = 0; for (HaxeGenericListPart part : getPsi().getGenericParam().getGenericListPartList()) { out.add(new HaxeGenericParamModel(part, index)); index++; } } return out; } public void addField(String name, SpecificTypeReference type) { this.getDocument().addTextAfterElement(getBodyPsi(), "\npublic var " + name + ":" + type.toStringWithoutConstant() + ";\n"); } public void addMethod(String name) { this.getDocument().addTextAfterElement(getBodyPsi(), "\npublic function " + name + "() {\n}\n"); } }
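/*
 * Illustrative sketch (not part of the original sources): summarising a class through the model
 * above. A HaxeClass PSI element must come from a resolved reference or an index lookup inside
 * the IDE, so this is only runnable in the plugin; the helper name and output format are
 * invented, and only accessors visible in HaxeClassModel are used.
 */
import com.intellij.plugins.haxe.lang.psi.HaxeClass;
import com.intellij.plugins.haxe.model.HaxeClassModel;
import com.intellij.plugins.haxe.model.HaxeClassReferenceModel;

final class HaxeClassModelSummary {
    /** Builds a one-line description of a class, its hierarchy and its member counts. */
    static String describe(HaxeClass psiClass) {
        final HaxeClassModel model = new HaxeClassModel(psiClass);
        final StringBuilder sb = new StringBuilder(model.getName());
        if (model.isInterface()) sb.append(" (interface)");
        if (model.isAbstract()) sb.append(" (abstract)");
        final HaxeClassModel parent = model.getParentClass();
        if (parent != null) sb.append(" extends ").append(parent.getName());
        for (HaxeClassReferenceModel iface : model.getImplementingInterfaces()) {
            final HaxeClassModel resolved = iface.getHaxeClass();
            sb.append(" implements ").append(resolved != null ? resolved.getName() : "?");
        }
        sb.append("; methods=").append(model.getMethods().size());
        sb.append(", fields=").append(model.getFields().size());
        sb.append(", hasConstructor=").append(model.hasConstructor());
        return sb.toString();
    }
}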
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.commons.cnd; import java.io.IOException; import java.io.Reader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.jcr.NamespaceRegistry; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.UnsupportedRepositoryOperationException; import javax.jcr.ValueFactory; import javax.jcr.Workspace; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.InvalidNodeTypeDefinitionException; import javax.jcr.nodetype.NodeDefinitionTemplate; import javax.jcr.nodetype.NodeType; import javax.jcr.nodetype.NodeTypeDefinition; import javax.jcr.nodetype.NodeTypeExistsException; import javax.jcr.nodetype.NodeTypeIterator; import javax.jcr.nodetype.NodeTypeManager; import javax.jcr.nodetype.NodeTypeTemplate; import javax.jcr.nodetype.PropertyDefinitionTemplate; import static org.apache.jackrabbit.JcrConstants.NT_BASE; /** * Utility class for importing compact node type definitions. * @see CompactNodeTypeDefReader * @see TemplateBuilderFactory */ public final class CndImporter { private CndImporter() { super(); } /** * Shortcut for * <pre> * registerNodeTypes(cnd, "cnd input stream", wsp.getNodeTypeManager(), * wsp.getNamespaceRegistry(), session.getValueFactory(), false); * </pre> * where <code>wsp</code> is the workspace of the <code>session</code> passed. * @see #registerNodeTypes(Reader, String, NodeTypeManager, NamespaceRegistry, ValueFactory, boolean) * @param cnd * @param session the session to use for registering the node types * @return the registered node types * * @throws InvalidNodeTypeDefinitionException * @throws NodeTypeExistsException * @throws UnsupportedRepositoryOperationException * @throws ParseException * @throws RepositoryException * @throws IOException */ public static NodeType[] registerNodeTypes(Reader cnd, Session session) throws InvalidNodeTypeDefinitionException, NodeTypeExistsException, UnsupportedRepositoryOperationException, ParseException, RepositoryException, IOException { Workspace wsp = session.getWorkspace(); return registerNodeTypes(cnd, "cnd input stream", wsp.getNodeTypeManager(), wsp.getNamespaceRegistry(), session.getValueFactory(), false); } /** * Shortcut for * <pre> * registerNodeTypes(cnd, "cnd input stream", wsp.getNodeTypeManager(), * wsp.getNamespaceRegistry(), session.getValueFactory(), reregisterExisting); * </pre> * where <code>wsp</code> is the workspace of the <code>session</code> passed. 
* @see #registerNodeTypes(Reader, String, NodeTypeManager, NamespaceRegistry, ValueFactory, boolean) * @param cnd the cnd reader * @param session the session to use for registering the node types * @param reregisterExisting <code>true</code> if existing node types should be re-registered * with those present in the cnd. <code>false</code> otherwise. * @return the registered node types * * @throws InvalidNodeTypeDefinitionException * @throws NodeTypeExistsException * @throws UnsupportedRepositoryOperationException * @throws ParseException * @throws RepositoryException * @throws IOException */ public static NodeType[] registerNodeTypes(Reader cnd, Session session, boolean reregisterExisting) throws InvalidNodeTypeDefinitionException, NodeTypeExistsException, UnsupportedRepositoryOperationException, ParseException, RepositoryException, IOException { Workspace wsp = session.getWorkspace(); return registerNodeTypes(cnd, "cnd input stream", wsp.getNodeTypeManager(), wsp.getNamespaceRegistry(), session.getValueFactory(), reregisterExisting); } /** * Registers node types in <code>cnd</code> format. * @param cnd a reader to the cnd. The reader is closed on return. * @param systemId an informative id of the given cnd input. * @param nodeTypeManager the {@link NodeTypeManager} used for creating and registering the * {@link NodeTypeTemplate}s, {@link NodeDefinitionTemplate}s and {@link PropertyDefinitionTemplate}s * defined in the cnd. * @param namespaceRegistry the {@link NamespaceRegistry} used for registering namespaces defined in * the cnd. * @param valueFactory the {@link ValueFactory} used to create * {@link PropertyDefinitionTemplate#setDefaultValues(javax.jcr.Value[]) default value(s)}. * @param reregisterExisting <code>true</code> if existing node types should be re-registered * with those present in the cnd. <code>false</code> otherwise. * @return the registered node types * * @throws ParseException if the cnd cannot be parsed * @throws InvalidNodeTypeDefinitionException if a <code>NodeTypeDefinition</code> is invalid. * @throws NodeTypeExistsException if <code>reregisterExisting</code> is <code>false</code> and a * <code>NodeTypeDefinition</code> specifies a node type name that is already registered. * @throws UnsupportedRepositoryOperationException if the <code>NodeTypeManager</code> does not * support node type registration. * @throws IOException if closing the cnd reader fails * @throws RepositoryException if another error occurs.
*/ public static NodeType[] registerNodeTypes(Reader cnd, String systemId, NodeTypeManager nodeTypeManager, NamespaceRegistry namespaceRegistry, ValueFactory valueFactory, boolean reregisterExisting) throws ParseException, InvalidNodeTypeDefinitionException, NodeTypeExistsException, UnsupportedRepositoryOperationException, RepositoryException, IOException { try { DefinitionBuilderFactory<NodeTypeTemplate, NamespaceRegistry> factory = new TemplateBuilderFactory(nodeTypeManager, valueFactory, namespaceRegistry); CompactNodeTypeDefReader<NodeTypeTemplate, NamespaceRegistry> cndReader = new CompactNodeTypeDefReader<NodeTypeTemplate, NamespaceRegistry>(cnd, systemId, factory); Map<String, NodeTypeTemplate> templates = new HashMap<String, NodeTypeTemplate>(); for (NodeTypeTemplate template : cndReader.getNodeTypeDefinitions()) { templates.put(template.getName(), template); } List<NodeTypeTemplate> toRegister = new ArrayList<NodeTypeTemplate>(templates.size()); for (NodeTypeTemplate ntt : templates.values()) { if (reregisterExisting || !nodeTypeManager.hasNodeType(ntt.getName())) { ensureNtBase(ntt, templates, nodeTypeManager); toRegister.add(ntt); } } NodeTypeIterator registered = nodeTypeManager.registerNodeTypes( toRegister.toArray(new NodeTypeTemplate[toRegister.size()]), true); return toArray(registered); } finally { cnd.close(); } } private static void ensureNtBase(NodeTypeTemplate ntt, Map<String, NodeTypeTemplate> templates, NodeTypeManager nodeTypeManager) throws RepositoryException { if (!ntt.isMixin() && !NT_BASE.equals(ntt.getName())) { String[] supertypes = ntt.getDeclaredSupertypeNames(); if (supertypes.length == 0) { ntt.setDeclaredSuperTypeNames(new String[] {NT_BASE}); } else { // Check whether we need to add the implicit "nt:base" supertype boolean needsNtBase = true; for (String name : supertypes) { NodeTypeDefinition std = templates.get(name); if (std == null) { std = nodeTypeManager.getNodeType(name); } if (std != null && !std.isMixin()) { needsNtBase = false; } } if (needsNtBase) { String[] withNtBase = new String[supertypes.length + 1]; withNtBase[0] = NT_BASE; System.arraycopy(supertypes, 0, withNtBase, 1, supertypes.length); ntt.setDeclaredSuperTypeNames(withNtBase); } } } } // -----------------------------------------------------< private >--- private static NodeType[] toArray(NodeTypeIterator nodeTypes) { ArrayList<NodeType> nts = new ArrayList<NodeType>(); while (nodeTypes.hasNext()) { nts.add(nodeTypes.nextNodeType()); } return nts.toArray(new NodeType[nts.size()]); } }
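/*
 * Illustrative usage sketch (not part of the original sources): registering a small node type
 * from an in-memory CND string against a live JCR Session. The namespace, node type and
 * property names are invented for the example.
 */
import java.io.IOException;
import java.io.StringReader;

import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.nodetype.NodeType;

import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.apache.jackrabbit.commons.cnd.ParseException;

final class CndImporterExample {
    static NodeType[] registerExampleTypes(Session session)
            throws ParseException, RepositoryException, IOException {
        final String cnd =
            "<ex='http://example.com/ns'>\n"
            + "[ex:document] > nt:base\n"
            + "  - ex:title (string) mandatory\n";
        // The reader is closed by registerNodeTypes(); existing node types are left untouched
        // because the two-argument overload passes reregisterExisting=false.
        return CndImporter.registerNodeTypes(new StringReader(cnd), session);
    }
}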
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.ParseField; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.BestDocsDeferringCollector; import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator; import org.elasticsearch.search.aggregations.reducers.Reducer; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import java.io.IOException; import java.util.List; import java.util.Map; /** * Aggregate on only the top-scoring docs on a shard. * * TODO currently the diversity feature of this agg offers only 'script' and * 'field' as a means of generating a de-dup value. In future it would be nice * if users could use any of the "bucket" aggs syntax (geo, date histogram...) * as the basis for generating de-dup values. Their syntax for creating bucket * values would be preferable to users having to recreate this logic in a * 'script' e.g. to turn a datetime in milliseconds into a month key value. 
*/ public class SamplerAggregator extends SingleBucketAggregator { public enum ExecutionMode { MAP(new ParseField("map")) { @Override Aggregator create(String name, AggregatorFactories factories, int shardSize, int maxDocsPerValue, ValuesSource valuesSource, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { return new DiversifiedMapSamplerAggregator(name, shardSize, factories, context, parent, reducers, metaData, valuesSource, maxDocsPerValue); } @Override boolean needsGlobalOrdinals() { return false; } }, BYTES_HASH(new ParseField("bytes_hash")) { @Override Aggregator create(String name, AggregatorFactories factories, int shardSize, int maxDocsPerValue, ValuesSource valuesSource, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { return new DiversifiedBytesHashSamplerAggregator(name, shardSize, factories, context, parent, reducers, metaData, valuesSource, maxDocsPerValue); } @Override boolean needsGlobalOrdinals() { return false; } }, GLOBAL_ORDINALS(new ParseField("global_ordinals")) { @Override Aggregator create(String name, AggregatorFactories factories, int shardSize, int maxDocsPerValue, ValuesSource valuesSource, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { return new DiversifiedOrdinalsSamplerAggregator(name, shardSize, factories, context, parent, reducers, metaData, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, maxDocsPerValue); } @Override boolean needsGlobalOrdinals() { return true; } }; public static ExecutionMode fromString(String value) { for (ExecutionMode mode : values()) { if (mode.parseField.match(value)) { return mode; } } throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + values()); } private final ParseField parseField; ExecutionMode(ParseField parseField) { this.parseField = parseField; } abstract Aggregator create(String name, AggregatorFactories factories, int shardSize, int maxDocsPerValue, ValuesSource valuesSource, AggregationContext context, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException; abstract boolean needsGlobalOrdinals(); @Override public String toString() { return parseField.getPreferredName(); } } protected final int shardSize; protected BestDocsDeferringCollector bdd; public SamplerAggregator(String name, int shardSize, AggregatorFactories factories, AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { super(name, factories, aggregationContext, parent, reducers, metaData); this.shardSize = shardSize; } @Override public boolean needsScores() { return true; } @Override public DeferringBucketCollector getDeferringCollector() { bdd = new BestDocsDeferringCollector(shardSize); return bdd; } @Override protected boolean shouldDefer(Aggregator aggregator) { return true; } @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { runDeferredCollections(owningBucketOrdinal); return new InternalSampler(name, bdd == null ? 
0 : bdd.getDocCount(), bucketAggregations(owningBucketOrdinal), reducers(), metaData()); } @Override public InternalAggregation buildEmptyAggregation() { return new InternalSampler(name, 0, buildEmptySubAggregations(), reducers(), metaData()); } public static class Factory extends AggregatorFactory { private int shardSize; public Factory(String name, int shardSize) { super(name, InternalSampler.TYPE.name()); this.shardSize = shardSize; } @Override public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { if (collectsFromSingleBucket == false) { return asMultiBucketAggregator(this, context, parent); } return new SamplerAggregator(name, shardSize, factories, context, parent, reducers, metaData); } } public static class DiversifiedFactory extends ValuesSourceAggregatorFactory<ValuesSource> { private int shardSize; private int maxDocsPerValue; private String executionHint; public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, int maxDocsPerValue) { super(name, InternalSampler.TYPE.name(), vsConfig); this.shardSize = shardSize; this.maxDocsPerValue = maxDocsPerValue; this.executionHint = executionHint; } @Override protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { if (collectsFromSingleBucket == false) { return asMultiBucketAggregator(this, context, parent); } if (valuesSource instanceof ValuesSource.Numeric) { return new DiversifiedNumericSamplerAggregator(name, shardSize, factories, context, parent, reducers, metaData, (Numeric) valuesSource, maxDocsPerValue); } if (valuesSource instanceof ValuesSource.Bytes) { ExecutionMode execution = null; if (executionHint != null) { execution = ExecutionMode.fromString(executionHint); } // In some cases using ordinals is just not supported: override // it if(execution==null){ execution = ExecutionMode.GLOBAL_ORDINALS; } if ((execution.needsGlobalOrdinals()) && (!(valuesSource instanceof ValuesSource.Bytes.WithOrdinals))) { execution = ExecutionMode.MAP; } return execution.create(name, factories, shardSize, maxDocsPerValue, valuesSource, context, parent, reducers, metaData); } throw new AggregationExecutionException("Sampler aggregation cannot be applied to field [" + config.fieldContext().field() + "]. It can only be applied to numeric or string fields."); } @Override protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { final UnmappedSampler aggregation = new UnmappedSampler(name, reducers, metaData); return new NonCollectingAggregator(name, aggregationContext, parent, factories, reducers, metaData) { @Override public InternalAggregation buildEmptyAggregation() { return aggregation; } }; } } @Override protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException { if (bdd == null) { throw new AggregationExecutionException("Sampler aggregation must be used with child aggregations."); } return bdd.getLeafCollector(ctx); } }
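/*
 * Illustrative request sketch (not part of the original sources): driving the sampler
 * aggregation above from the transport client of the same era. It assumes the client-side
 * SamplerAggregationBuilder reachable via AggregationBuilders.sampler(), with
 * shardSize/field/maxDocsPerValue/executionHint setters mirroring the factories above;
 * the index, field names and agg names are invented.
 */
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;

final class SamplerAggExample {
    static SearchResponse sampledKeywords(Client client) {
        return client.prepareSearch("articles")
                .setQuery(QueryBuilders.matchQuery("body", "elasticsearch"))
                .addAggregation(AggregationBuilders.sampler("best_docs")
                        .shardSize(200)         // keep only the 200 best-scoring docs per shard
                        .field("author")        // diversify: de-duplicate on the author field
                        .maxDocsPerValue(1)     // at most one doc per distinct author
                        .executionHint("map")   // ExecutionMode.MAP above
                        .subAggregation(AggregationBuilders.significantTerms("keywords").field("tags")))
                .execute().actionGet();
    }
}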
/* * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores * CA 94065 USA or visit www.oracle.com if you need additional information or * have any questions. */ package com.sun.lwuit.html; import com.sun.lwuit.Component; import com.sun.lwuit.Display; import com.sun.lwuit.Image; import com.sun.lwuit.Label; import com.sun.lwuit.geom.Dimension; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Enumeration; import java.util.Hashtable; import java.util.Vector; /** * ResourceThreadQueue is a thread queue used to create and manage threads that download images and CSS files that were referenced from HTML pages. * Was called ImageThreadQueue but the name was changed since it now handles CSS as well. * * @author Ofir Leitner */ class ResourceThreadQueue { /** * The default maximum number of threads used for image download */ private static int DEFAULT_MAX_THREADS = 2; HTMLComponent htmlC; Vector queue = new Vector(); Vector running = new Vector(); Vector bgImageCompsUnselected = new Vector(); Vector bgImageCompsSelected = new Vector(); Vector bgImageCompsPressed = new Vector(); Hashtable images = new Hashtable(); static int maxThreads = DEFAULT_MAX_THREADS; int threadCount; private int cssCount=-1; // As long as there are no CSS files this stays -1 and thus CSS loading is ignored boolean started; /** * Constructs the queue * * @param htmlC The HTMLComponent this queue belongs to */ ResourceThreadQueue(HTMLComponent htmlC) { this.htmlC=htmlC; } /** * Sets the maximum number of threads to use for image download * If startRunning was already called, this will take effect in the next page loaded. * * @param threadsNum the maximum number of threads to use for image download */ static void setMaxThreads(int threadsNum) { maxThreads=threadsNum; } /** * Adds the image to the queue * * @param imgLabel The label in which the image should be contained after loaded * @param imageUrl The URL this image should be fetched from */ synchronized void add(Component imgLabel,String imageUrl) { /*if ((HTMLComponent.TABLES_LOCK_SIZE) && (htmlC.curTable!=null)) { downloadImageImmediately(imgLabel, imageUrl, 0); } else {*/ if (started) { throw new IllegalStateException("ResourceThreadQueue already started!
stop/cancel first"); } images.put(imgLabel, imageUrl); // Using a hashtable to collect all requests first enables overriding urls for labels (For example in CSS use cases) //} } /** * Adds the image to the queue, to be used as a background image for a component * * @param imgComp The component for which the image should be used after loaded * @param imageUrl The URL this image should be fetched from * @param styles A mask of CSSEngine.STYLE_* values indicating in which styles this background image should be displayed */ synchronized void addBgImage(Component imgComp,String imageUrl,int styles) { if (HTMLComponent.SUPPORT_CSS) { /*if ((HTMLComponent.TABLES_LOCK_SIZE) && (htmlC.curTable!=null)) { downloadImageImmediately(imgComp,imageUrl,styles); } else {*/ add(imgComp,imageUrl); if ((styles & CSSEngine.STYLE_SELECTED)!=0) { bgImageCompsSelected.addElement(imgComp); } if ((styles & CSSEngine.STYLE_UNSELECTED)!=0) { bgImageCompsUnselected.addElement(imgComp); } if ((styles & CSSEngine.STYLE_PRESSED)!=0) { bgImageCompsPressed.addElement(imgComp); } //} } } /** * Downloads the image immediately // It seems that the issue that required this was already solved in the table package * The image is not added to the queue, and not loaded on another thread but rather downloaded on the same thread that builds up the document * This is useful for HTMLComponent.TABLES_PATCH * * @param imgComp The component for which the image should be used after loaded * @param imageUrl The URL this image should be fetched from * @param styles A mask of CSSEngine.STYLE_* values indicating in which styles this background image should be displayed * synchronized void downloadImageImmediately(Component imgComp,String imageUrl,int styles) { try { InputStream is = htmlC.getRequestHandler().resourceRequested(new DocumentInfo(imageUrl,DocumentInfo.TYPE_IMAGE)); Image img = Image.createImage(is); ResourceThread t = new ResourceThread(imageUrl, imgComp, htmlC, null); t.handleImage(img, imgComp,((styles & CSSEngine.STYLE_UNSELECTED)!=0),((styles & CSSEngine.STYLE_SELECTED)!=0),((styles & CSSEngine.STYLE_PRESSED)!=0)); } catch (Exception ex) { ex.printStackTrace(); } }*/ /** * Adds a stylesheet to the queue * * @param cssUrl The URL this style sheet should be fetched from * @param encoding the encoding of the style sheet, or null to use the default */ synchronized void addCSS(String cssUrl,String encoding) { if (started) { throw new IllegalStateException("ResourceThreadQueue already started! stop/cancel first"); } DocumentInfo cssDocInfo=new DocumentInfo(cssUrl, DocumentInfo.TYPE_CSS); if (encoding!=null) { cssDocInfo.setEncoding(encoding); } ResourceThread t = new ResourceThread(cssDocInfo, htmlC, this); queue.addElement(t); incCSSCount(); } /** * Increases the internal count of the number of pending CSS documents */ private void incCSSCount() { if (cssCount==-1) { // first CSS make sure we bump it from -1 to 1. cssCount++; } cssCount++; } /** * Returns the number of pending CSS documents * * @return the number of pending CSS documents */ synchronized int getCSSCount() { return cssCount; } /** * Returns the queue size, this is relevant only when the queue hasn't started yet * * @return the queue size */ int getQueueSize() { return (images.size()+queue.size()); // CSS files are added directly to queue while images to the images vector //return queue.size(); } /** * Notifies the queue that all images and CSS have been queued and it can start dequeuing and downloading the images.
* The queue isn't started before that to prevent multiple downloads of the same image */ synchronized void startRunning() { if (!startDequeue()) { startRunningImages(); } } /** * Starts downloading the images (This is called only after all CSS files have been downloaded, since they may contain image references) */ synchronized void startRunningImages() { queue.removeAllElements(); Vector urls=new Vector(); for(Enumeration e=images.keys();e.hasMoreElements();) { Component imgComp = (Component)e.nextElement(); String imageUrl = (String)images.get(imgComp); int urlIndex=urls.indexOf(imageUrl); if (urlIndex!=-1) { ResourceThread t=(ResourceThread)queue.elementAt(urlIndex); t.addLabel(imgComp); } else { ResourceThread t = new ResourceThread(imageUrl, imgComp, htmlC, this); queue.addElement(t); urls.addElement(imageUrl); } } urls=null; images=new Hashtable(); if (!startDequeue()) { htmlC.setPageStatus(HTMLCallback.STATUS_COMPLETED); } } /** * Starts dequeuing the queue into the running pool and launches the threads * * @return true if there is at least one active thread, false otherwise */ private synchronized boolean startDequeue() { int threads=Math.min(queue.size(), maxThreads); for(int i=0;i<threads;i++) { ResourceThread t=(ResourceThread)queue.firstElement(); queue.removeElementAt(0); running.addElement(t); threadCount++; //t.go(); //new Thread(t).start(); } for(Enumeration e=running.elements();e.hasMoreElements();) { ResourceThread t=(ResourceThread)e.nextElement(); t.go(); } return (threads>0); } /** * Called by the ResourceThread when it finishes downloading and setting the image. * This in turn starts another thread if the queue is not empty * * @param finishedThread The calling thread * @param success true if the image download was successful, false otherwise */ synchronized void threadFinished(ResourceThread finishedThread,boolean success) { if(finishedThread.cssDocInfo!=null) { cssCount--; // Reduce the number of waiting CSS, even if reading failed } if ((HTMLComponent.SUPPORT_CSS) && (cssCount==0)) { cssCount=-1; // So it won't get applied again htmlC.applyAllCSS(); htmlC.cssCompleted(); } running.removeElement(finishedThread); if (queue.size()>0) { ResourceThread t=(ResourceThread)queue.firstElement(); queue.removeElementAt(0); running.addElement(t); t.go(); //new Thread(t).start(); } else { threadCount--; } if (threadCount==0) { if (images.size()==0) { htmlC.setPageStatus(HTMLCallback.STATUS_COMPLETED); } else { startRunningImages(); } } } /** * Discards the entire queue and signals the running threads to cancel. * This will be triggered if the user cancelled the page or moved to another page.
*/ synchronized void discardQueue() { queue.removeAllElements(); for(Enumeration e=running.elements();e.hasMoreElements();) { ResourceThread t = (ResourceThread)e.nextElement(); t.cancel(); } running.removeAllElements(); bgImageCompsSelected.removeAllElements(); bgImageCompsUnselected.removeAllElements(); bgImageCompsPressed.removeAllElements(); threadCount=0; cssCount=-1; started=false; } /** * Returns a printout of the threads queue, can be used for debugging * * @return a printout of the threads queue */ public String toString() { String str=("---- Running ----\n"); int i=1; for(Enumeration e=running.elements();e.hasMoreElements();) { ResourceThread t = (ResourceThread)e.nextElement(); if (t.imageUrl!=null) { str+="#"+i+": "+t.imageUrl+"\n"; } else { str+="#"+i+": CSS - "+t.cssDocInfo.getUrl()+"\n"; } i++; } i=1; str+="Queue:\n"; for(Enumeration e=queue.elements();e.hasMoreElements();) { ResourceThread t = (ResourceThread)e.nextElement(); if (t.imageUrl!=null) { str+="#"+i+": "+t.imageUrl+"\n"; } else { str+="#"+i+": CSS - "+t.cssDocInfo.getUrl()+"\n"; } i++; } str+="---- count:"+threadCount+" ----\n"; return str; } // Inner classes: /** * A ResourceThread downloads an Image as requested * * @author Ofir Leitner */ class ResourceThread implements Runnable, AsyncDocumentRequestHandler.IOCallback { Component imgLabel; Vector labels; String imageUrl; DocumentRequestHandler handler; ResourceThreadQueue threadQueue; boolean cancelled; HTMLComponent htmlC; Image img; DocumentInfo cssDocInfo; /** * Constructs the ResourceThread for an image file * * @param imgLabel The label in which the image should be contained after loaded * @param imageUrl The URL this image should be fetched from * @param htmlC The HTMLComponent whose RequestHandler is used to retrieve the image * @param threadQueue The main queue, for callback purposes */ ResourceThread(String imageUrl, Component imgLabel,HTMLComponent htmlC,ResourceThreadQueue threadQueue) { this.imageUrl=imageUrl; this.imgLabel=imgLabel; this.handler=htmlC.getRequestHandler(); this.threadQueue=threadQueue; this.htmlC=htmlC; } /** * Constructs the ResourceThread for a CSS file * * @param cssDocInfo A DocumentInfo object with the URL this CSS file should be fetched from * @param htmlC The HTMLComponent whose RequestHandler is used to retrieve the CSS file * @param threadQueue The main queue, for callback purposes */ ResourceThread(DocumentInfo cssDocInfo,HTMLComponent htmlC,ResourceThreadQueue threadQueue) { this.cssDocInfo=cssDocInfo; this.handler=htmlC.getRequestHandler(); this.threadQueue=threadQueue; this.htmlC=htmlC; } /** * Cancels this thread */ void cancel() { cancelled=true; } /** * Adds a label which has the same URL, useful for duplicate images in the same page * * @param label A label which has the same image URL */ void addLabel(Component label) { if (labels==null) { labels=new Vector(); } labels.addElement(label); } /** * This is the main entry point to this runnable; it checks whether the callback is synchronous or async.
* According to that it either runs this as a thread (sync) or simply calls the async method (async implements threading itself) */ void go() { if (handler instanceof AsyncDocumentRequestHandler) { DocumentInfo docInfo=cssDocInfo!=null?cssDocInfo:new DocumentInfo(imageUrl,DocumentInfo.TYPE_IMAGE); ((AsyncDocumentRequestHandler)handler).resourceRequestedAsync(docInfo, this); } else { new Thread(this).start(); } } /** * {@inheritDoc} */ public void run() { DocumentInfo docInfo=cssDocInfo!=null?cssDocInfo:new DocumentInfo(imageUrl,DocumentInfo.TYPE_IMAGE); InputStream is = handler.resourceRequested(docInfo); streamReady(is, docInfo); } /** * {@inheritDoc} */ public void streamReady(InputStream is,DocumentInfo docInfo) { try { if (is==null) { if (htmlC.getHTMLCallback()!=null) { htmlC.getHTMLCallback().parsingError(cssDocInfo!=null?HTMLCallback.ERROR_CSS_NOT_FOUND:HTMLCallback.ERROR_IMAGE_NOT_FOUND, null, null, null, (cssDocInfo!=null?"CSS":"Image")+" not found at "+(cssDocInfo!=null?cssDocInfo.getUrl():imageUrl)); } } else { if(cssDocInfo!=null) { // CSS if (HTMLComponent.SUPPORT_CSS) { // no need to also check if loadCSS is true, since if we got so far - it is... CSSElement result = CSSParser.getInstance().parseCSSSegment(new InputStreamReader(is),is,htmlC,cssDocInfo.getUrl()); result.setAttribute(result.getAttributeName(new Integer(CSSElement.CSS_PAGEURL)), cssDocInfo.getUrl()); htmlC.addToExternalCSS(result); } threadQueue.threadFinished(this,true); return; } else { img=Image.createImage(is); if (img==null) { if (htmlC.getHTMLCallback()!=null) { htmlC.getHTMLCallback().parsingError(HTMLCallback.ERROR_IMAGE_BAD_FORMAT, null, null, null, "Image could not be created from "+imageUrl); } } } } if (img==null) { threadQueue.threadFinished(this,false); return; } if (!cancelled) { Display.getInstance().callSerially(new Runnable() { public void run() { handleImage(img,imgLabel); if (labels!=null) { for(Enumeration e=labels.elements();e.hasMoreElements();) { Component cmp=(Component)e.nextElement(); handleImage(img,cmp); } } } }); threadQueue.threadFinished(this,true); } } catch (IOException ioe) { if (htmlC.getHTMLCallback()!=null) { htmlC.getHTMLCallback().parsingError(HTMLCallback.ERROR_IMAGE_BAD_FORMAT, null, null, null, "Image could not be created from "+imageUrl+": "+ioe.getMessage()); } if(!cancelled) { threadQueue.threadFinished(this,false); } } } /** * After a successful download, this handles placing the image on the label and resizing if necessary * * @param img The image * @param cmp The component to apply the image on */ private void handleImage(Image img,Component cmp) { boolean bgUnselected=(threadQueue.bgImageCompsUnselected.contains(cmp)); boolean bgSelected=(threadQueue.bgImageCompsSelected.contains(cmp)); boolean bgPressed=(threadQueue.bgImageCompsPressed.contains(cmp)); handleImage(img, cmp,bgUnselected,bgSelected,bgPressed); } /** * After a successful download, this handles placing the image on the label and resizing if necessary * * @param img The image * @param cmp The component to apply the image on * @param bgUnselected true if the image should be used as a background for the component when it is unselected, false otherwise * @param bgSelected true if the image should be used as a background for the component when it is selected, false otherwise * @param bgPressed true if the image should be used as a background for the component when it is pressed, false otherwise */ void handleImage(Image img,Component cmp,boolean bgUnselected,boolean bgSelected,boolean bgPressed) { boolean
bg=false; if (bgUnselected) { cmp.getUnselectedStyle().setBgImage(img); bg=true; } if (bgSelected) { cmp.getSelectedStyle().setBgImage(img); bg=true; } if (bgPressed) { if (cmp instanceof HTMLLink) { ((HTMLLink)cmp).getPressedStyle().setBgImage(img); } bg=true; } if (bg) { cmp.repaint(); return; } Label label = (Label)cmp; label.setText(""); // remove the alternate text (important to do here before checking the width/height) // Was set in HTMLComponent.handleImage if the width/height attributes were in the tag int width=label.getPreferredW()-label.getStyle().getPadding(Component.LEFT)-label.getStyle().getPadding(Component.RIGHT); int height=label.getPreferredH()-label.getStyle().getPadding(Component.TOP)-label.getStyle().getPadding(Component.BOTTOM); if (width!=0) { if (height==0) { // If only width was specified, height should be calculated so the image keeps its aspect ratio height=img.getHeight()*width/img.getWidth(); } } else if (height!=0) { if (width==0) { // If only height was specified, width should be calculated so the image keeps its aspect ratio width=img.getWidth()*height/img.getHeight(); } } if (width!=0) { // if any of width or height were not 0, the other one was set to non-zero above, so this check suffices img=img.scaled(width, height); width+=label.getStyle().getPadding(Component.LEFT)+label.getStyle().getPadding(Component.RIGHT); height+=label.getStyle().getPadding(Component.TOP)+label.getStyle().getPadding(Component.BOTTOM); label.setPreferredSize(new Dimension(width,height)); } label.setIcon(img); htmlC.revalidate(); if (label.getClientProperty(HTMLComponent.CLIENT_PROPERTY_IMG_BORDER)==null) { // if this property is defined, it means the image had a border already label.getUnselectedStyle().setBorder(null); //remove the border which is a sign the image is loading } else { int borderSize=((Integer)label.getClientProperty(HTMLComponent.CLIENT_PROPERTY_IMG_BORDER)).intValue(); // Note that padding is set here and not in handleImage since we rely on the image size (which includes padding) to know if a width/height was specified label.getUnselectedStyle().setPadding(borderSize,borderSize,borderSize,borderSize); label.getSelectedStyle().setPadding(borderSize,borderSize,borderSize,borderSize); } } } }
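/*
 * Illustrative sketch only, not part of the original sources: the bounded
 * dequeue-into-running-pool pattern that ResourceThreadQueue.startDequeue()
 * and threadFinished() implement above, reduced to a standalone class. At most
 * MAX_THREADS tasks run at once; each completion dequeues the next waiting
 * task. All names here (BoundedDequeueDemo, submit, onFinished) are hypothetical.
 */
import java.util.Vector;

class BoundedDequeueDemo {
    private final Vector queue = new Vector();   // waiting tasks, FIFO
    private int active = 0;                      // mirrors threadCount
    private static final int MAX_THREADS = 2;    // mirrors maxThreads

    synchronized void submit(final Runnable task) {
        // Wrap the task so that finishing always triggers the next dequeue,
        // the role ResourceThread plays by calling threadQueue.threadFinished().
        queue.addElement(new Runnable() {
            public void run() {
                try { task.run(); } finally { onFinished(); }
            }
        });
        dequeue();
    }

    private synchronized void dequeue() {
        while (active < MAX_THREADS && !queue.isEmpty()) {
            Runnable next = (Runnable) queue.firstElement();
            queue.removeElementAt(0);
            active++;
            new Thread(next).start();
        }
    }

    private synchronized void onFinished() {
        active--;
        dequeue(); // start the next waiting task, if any
    }
}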
package test; import android.test.InstrumentationTestCase; import com.agilie.dribbblesdk.domain.Bucket; import com.agilie.dribbblesdk.domain.Shot; import com.agilie.dribbblesdk.service.retrofit.DribbbleWebServiceHelper; import com.agilie.dribbblesdk.service.retrofit.services.DribbbleBucketsService; import java.util.List; import java.util.concurrent.CountDownLatch; import okhttp3.OkHttpClient; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; import retrofit2.Retrofit; public class DribbbleBucketsServiceTest extends InstrumentationTestCase { private static final String AUTH_TOKEN_FOR_TEST = "55f066c4782e254aa6b6ce516784df3a98b03f22ce42bd7f7495ba2355cda14e"; private static final long TEST_BUCKET_ID = 267166; private static final long TEST_AUTHORIZED_USER_BUCKET_ID = 274205; private static final long TEST_SHOT_TO_ADD_REMOVE_FROM_BUCKET = 1999287; private static final long TEST_SHOT_ID = 1997749; private DribbbleBucketsService authorizedDribbbleService; private DribbbleBucketsService dribbbleService; public DribbbleBucketsServiceTest() { OkHttpClient.Builder okHttpClientBuilder = DribbbleWebServiceHelper.getOkHttpClientBuilder(AUTH_TOKEN_FOR_TEST); Retrofit retrofit = DribbbleWebServiceHelper.getRetrofitBuilder(okHttpClientBuilder).build(); authorizedDribbbleService = DribbbleWebServiceHelper.getDribbbleBucketService(retrofit); dribbbleService = DribbbleWebServiceHelper.getDribbbleBucketService(retrofit); } public void testGetBucket() throws Throwable { final CountDownLatch signal = new CountDownLatch(1); runTestOnUiThread(new Runnable() { public void run() { dribbbleService.getBucket(TEST_BUCKET_ID) .enqueue(new Callback<Bucket>() { @Override public void onResponse(Call<Bucket> call, Response<Bucket> response) { assertNotNull(response.body()); signal.countDown(); } @Override public void onFailure(Call<Bucket> call, Throwable t) { assertTrue("testGetBucket failed", false); signal.countDown(); } }); } }); signal.await(); } public void testCreateDeleteBucket() throws Throwable { final CountDownLatch signal = new CountDownLatch(1); runTestOnUiThread(new Runnable() { public void run() { Bucket bucket = new Bucket(); bucket.setName("BucketName"); bucket.setDescription("Description for this bucket"); authorizedDribbbleService.createBucket(bucket) .enqueue(new Callback<Bucket>() { @Override public void onResponse(Call<Bucket> call, Response<Bucket> response) { assertNotNull(response.body()); authorizedDribbbleService.deleteBucket(response.body().getId()).enqueue(new Callback<Void>() { @Override public void onResponse(Call<Void> call, Response<Void> response) { signal.countDown(); } @Override public void onFailure(Call<Void> call, Throwable t) { assertTrue("testDeleteBucket failed", false); signal.countDown(); } }); } @Override public void onFailure(Call<Bucket> call, Throwable t) { assertTrue("testCreateBucket failed", false); signal.countDown(); } }); } }); signal.await(); } public void testCreateUpdateDeleteBucket() throws Throwable { final CountDownLatch signal = new CountDownLatch(1); runTestOnUiThread(new Runnable() { public void run() { Bucket bucket = new Bucket(); bucket.setName("BucketName"); bucket.setDescription("Description for this bucket"); authorizedDribbbleService.createBucket(bucket) .enqueue(new Callback<Bucket>() { @Override public void onResponse(Call<Bucket> call, Response<Bucket> response) { Bucket bucket = response.body(); assertNotNull(bucket); bucket.setName("New BucketName"); authorizedDribbbleService.updateBucket(bucket.getId(), bucket)
.enqueue(new Callback<Bucket>() { @Override public void onResponse(Call<Bucket> call, Response<Bucket> response) { Bucket bucket = response.body(); assertNotNull(bucket); authorizedDribbbleService.deleteBucket(bucket.getId()) .enqueue(new Callback<Void>() { @Override public void onResponse(Call<Void> call, Response<Void> response) { signal.countDown(); } @Override public void onFailure(Call<Void> call, Throwable t) { assertTrue("testDeleteBucket failed", false); signal.countDown(); } }); } @Override public void onFailure(Call<Bucket> call, Throwable t) { assertTrue("testUpdateBucket failed", false); signal.countDown(); } }); } @Override public void onFailure(Call<Bucket> call, Throwable t) { assertTrue("testCreateBucket failed", false); signal.countDown(); } }); } }); signal.await(); } public void testGetShotsForBucket() throws Throwable { final CountDownLatch signal = new CountDownLatch(1); runTestOnUiThread(new Runnable() { public void run() { dribbbleService.getShotsForBucket(TEST_BUCKET_ID) .enqueue(new Callback<List<Shot>>() { @Override public void onResponse(Call<List<Shot>> call, Response<List<Shot>> response) { assertNotNull(response.body()); signal.countDown(); } @Override public void onFailure(Call<List<Shot>> call, Throwable t) { assertTrue("testGetShotsForBucket failed", false); signal.countDown(); } } ); } }); signal.await(); } public void testAddRemoveBucketShot() throws Throwable { final CountDownLatch signal = new CountDownLatch(1); runTestOnUiThread(new Runnable() { public void run() { final Shot shot = new Shot(); shot.setId(TEST_SHOT_ID); authorizedDribbbleService.addShotToBucket(TEST_AUTHORIZED_USER_BUCKET_ID, TEST_SHOT_TO_ADD_REMOVE_FROM_BUCKET) .enqueue(new Callback<Void>() { @Override public void onResponse(Call<Void> call, Response<Void> response) { authorizedDribbbleService.removeShotFromBucket(TEST_AUTHORIZED_USER_BUCKET_ID, TEST_SHOT_TO_ADD_REMOVE_FROM_BUCKET).enqueue( new Callback<Void>() { @Override public void onResponse(Call<Void> call, Response<Void> response) { signal.countDown(); } @Override public void onFailure(Call<Void> call, Throwable t) { assertTrue("testRemoveShotFromBucket failed", false); signal.countDown(); } } ); } @Override public void onFailure(Call<Void> call, Throwable t) { assertTrue("testAddShotToBucket failed", false); signal.countDown(); } } ); } }); signal.await(); } }
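/*
 * Illustrative sketch only, not part of the SDK sources: the CountDownLatch
 * idiom the tests above depend on. The test thread blocks in await() until the
 * asynchronous callback calls countDown(); if any callback path forgets to
 * count down, the test hangs. Callback and fetchAsync are hypothetical names.
 */
import java.util.concurrent.CountDownLatch;

class LatchTestPatternDemo {
    interface Callback { void onDone(boolean success); }

    // Stands in for an enqueue()-style call that completes on another thread.
    static void fetchAsync(final Callback cb) {
        new Thread(new Runnable() {
            public void run() { cb.onDone(true); }
        }).start();
    }

    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch signal = new CountDownLatch(1);
        fetchAsync(new Callback() {
            public void onDone(boolean success) {
                System.out.println("callback success=" + success); // assertions go here
                signal.countDown(); // release the waiting thread on every path
            }
        });
        signal.await(); // block until the callback has fired
    }
}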
package xyz.stepsecret.arrayproject3; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.Color; import android.location.LocationManager; import android.net.Uri; import android.os.Bundle; import android.os.CountDownTimer; import android.provider.Settings; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.util.Log; import com.gun0912.tedpermission.PermissionListener; import com.gun0912.tedpermission.TedPermission; import java.util.ArrayList; import java.util.Locale; import cn.pedant.SweetAlert.SweetAlertDialog; import xyz.stepsecret.arrayproject3.Form.LoginActivity; import xyz.stepsecret.arrayproject3.TinyDB.TinyDB; /** * Created by stepsecret on 14/9/2559. */ public class StartUp extends AppCompatActivity { private SweetAlertDialog pDialog; private TinyDB Store_data; private String temp_language; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_startup); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); pDialog(); //turnGPSOn(); Initial_(); showGPSDisabledAlertToUser(); } private void showGPSDisabledAlertToUser(){ LocationManager locationManager = (LocationManager) getSystemService(LOCATION_SERVICE); if (locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) { pDialog.cancel(); Intent i = new Intent(getApplicationContext(), LoginActivity.class); startActivity(i); finish(); } else { AlertDialog.Builder dialog = new AlertDialog.Builder(this); dialog.setTitle("Turn On GPS"); dialog.setIcon(R.mipmap.ic_launcher); dialog.setCancelable(true); dialog.setMessage("Please!!! 
Turn On GPS"); dialog.setPositiveButton("Yes", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS); startActivityForResult(intent, 1); } }); dialog.setNegativeButton("No", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); dialog.show(); } } private void pDialog() { pDialog = new SweetAlertDialog(this, SweetAlertDialog.PROGRESS_TYPE); pDialog.getProgressHelper().setBarColor(Color.parseColor("#A5DC86")); pDialog.setTitleText("Loading"); pDialog.setCancelable(false); pDialog.show(); } public void setLanguage(String language) { Locale locale = new Locale(language); Locale.setDefault(locale); Configuration config = new Configuration(); config.locale = locale; //config.setLocale(locale); // api 17 getBaseContext().getResources().updateConfiguration(config, getBaseContext().getResources().getDisplayMetrics()); //Store_data.putString("language", language); } private void Initial_() { PermissionListener permissionlistener = new PermissionListener() { @Override public void onPermissionGranted() { //Toast.makeText(LoginActivity.this, "Permission Granted", Toast.LENGTH_SHORT).show(); } @Override public void onPermissionDenied(ArrayList<String> deniedPermissions) { //Toast.makeText(LoginActivity.this, "Permission Denied\n" + deniedPermissions.toString(), Toast.LENGTH_SHORT).show(); } }; new TedPermission(this) .setPermissionListener(permissionlistener) .setRationaleMessage("we need permission for write external storage and find your location") .setDeniedMessage("If you reject permission,you can not use this service\n\nPlease turn on permissions at [Setting] > [Permission]") .setGotoSettingButtonText("Go to setting") .setPermissions(android.Manifest.permission.WRITE_EXTERNAL_STORAGE, android.Manifest.permission.ACCESS_FINE_LOCATION, android.Manifest.permission.ACCESS_COARSE_LOCATION) .check(); Store_data = new TinyDB(getApplicationContext()); temp_language = Store_data.getString("language"); if(temp_language != null && !temp_language.isEmpty()) { setLanguage(temp_language); } else { Store_data.putString("message", "sound"); Store_data.putString("notification", "sound"); Store_data.putString("language", "en"); Store_data.putInt("number_message", 0); setLanguage("en"); } } @Override public void onBackPressed() { super.onBackPressed(); pDialog.cancel(); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); new CountDownTimer(5000, 1000){ public void onTick(long millisUntilDone){ Log.e(" StartUp "," "+millisUntilDone); } public void onFinish() { showGPSDisabledAlertToUser(); } }.start(); } }
package it.unisa.dia.gas.plaf.jpbc.util.math; import java.math.BigInteger; import java.security.SecureRandom; import static java.math.BigInteger.ONE; import static java.math.BigInteger.ZERO; /** * @author Angelo De Caro ([email protected]) */ public class BigIntegerUtils { public static final BigInteger TWO = BigInteger.valueOf(2); public static final BigInteger THREE = BigInteger.valueOf(3); public static final BigInteger FOUR = BigInteger.valueOf(4); public static final BigInteger FIVE = BigInteger.valueOf(5); public static final BigInteger SIX = BigInteger.valueOf(6); public static final BigInteger SEVEN = BigInteger.valueOf(7); public static final BigInteger EIGHT = BigInteger.valueOf(8); public static final BigInteger TWELVE = BigInteger.valueOf(12); public static final BigInteger MAXINT = BigInteger.valueOf(Integer.MAX_VALUE); public static final BigInteger ITERBETTER = ONE.shiftLeft(1024); public static boolean isOdd(BigInteger bigInteger) { return bigInteger.testBit(0); } // windowed NAF form of BigInteger n, k is the window size public static byte[] naf(BigInteger n, byte k) { // The window NAF is at most 1 element longer than the binary // representation of the integer n. byte can be used instead of short or // int unless the window width is larger than 8. For larger width use // short or int. However, a width of more than 8 is not efficient for // m = log2(q) smaller than 2305 Bits. Note: Values for m larger than // 1000 Bits are currently not used in practice. byte[] wnaf = new byte[n.bitLength() + 1]; // 2^width as short and BigInteger short pow2wB = (short) (1 << k); BigInteger pow2wBI = BigInteger.valueOf(pow2wB); int i = 0; // The actual length of the WNAF int length = 0; // while n >= 1 while (n.signum() > 0) { // if n is odd if (n.testBit(0)) { // n mod 2^width BigInteger remainder = n.mod(pow2wBI); // if remainder > 2^(width - 1) - 1 if (remainder.testBit(k - 1)) { wnaf[i] = (byte) (remainder.intValue() - pow2wB); } else { wnaf[i] = (byte) remainder.intValue(); } // wnaf[i] is now in [-2^(width-1), 2^(width-1)-1] n = n.subtract(BigInteger.valueOf(wnaf[i])); length = i; } else { wnaf[i] = 0; } // n = n/2 n = n.shiftRight(1); i++; } length++; // Reduce the WNAF array to its actual length byte[] wnafShort = new byte[length]; System.arraycopy(wnaf, 0, wnafShort, 0, length); return wnafShort; } public static int hammingWeight(byte[] bytes, int length) { int weight = 0; for (int i = 0; i < length; i++) { if (bytes[i] != 0) weight++; } return weight; } public static BigInteger generateSolinasPrime(int bits, SecureRandom random) { // r is picked to be a Solinas prime, that is, // r has the form 2^a +- 2^b +- 1 for some integers 0 < b < a.
BigInteger r, q; int exp2, sign1; while (true) { r = BigInteger.ZERO; if (random.nextInt(Integer.MAX_VALUE) % 2 != 0) { exp2 = bits - 1; sign1 = 1; } else { exp2 = bits; sign1 = -1; } r = r.setBit(exp2); q = BigInteger.ZERO.setBit((random.nextInt(Integer.MAX_VALUE) % (exp2 - 1)) + 1); if (sign1 > 0) { r = r.add(q); } else { r = r.subtract(q); } if (random.nextInt(Integer.MAX_VALUE) % 2 != 0) { r = r.add(BigInteger.ONE); } else { r = r.subtract(BigInteger.ONE); } if (r.isProbablePrime(10)) return r; } } public static BigInteger factorial(int n) { return factorial(BigInteger.valueOf(n)); } public static BigInteger factorial(BigInteger n) { if (n.equals(ZERO)) return ONE; BigInteger i = n.subtract(ONE); while (i.compareTo(ZERO) > 0) { n = n.multiply(i); i = i.subtract(ONE); } return n; } /** * Compute trace of Frobenius at q^n given trace at q * see p.105 of Blake, Seroussi and Smart * * @param q * @param trace * @param n * @return */ public static BigInteger computeTrace(BigInteger q, BigInteger trace, int n) { int i; BigInteger c0, c1, c2; BigInteger t0; c2 = TWO; c1 = trace; for (i = 2; i <= n; i++) { c0 = trace.multiply(c1); t0 = q.multiply(c2); c0 = c0.subtract(t0); c2 = c1; c1 = c0; } return c1; } // Given q, t such that #E(F_q) = q - t + 1, compute #E(F_q^k). public static BigInteger pbc_mpz_curve_order_extn(BigInteger q, BigInteger t, int k) { BigInteger z = q.pow(k).add(BigInteger.ONE); BigInteger tk = computeTrace(q, t, k); z = z.subtract(tk); return z; } public static boolean isDivisible(BigInteger a, BigInteger b) { return a.remainder(b).compareTo(ZERO) == 0; } public static boolean isPerfectSquare(BigInteger n) { return fullSqrt(n)[1].signum() == 0; } public static BigInteger sqrt(BigInteger n) { return fullSqrt(n)[0]; } /* Compute the integer square root of n Precondition: n >= 0 Postcondition: Result sr has the property sr[0]^2 <= n < (sr[0] + 1)^2 and (sr[0]^2 + sr[1] = n) */ public static BigInteger[] fullSqrt(BigInteger n) { if (n.compareTo(MAXINT) < 1) { long ln = n.longValue(); long s = (long) java.lang.Math.sqrt(ln); return new BigInteger[]{ BigInteger.valueOf(s), BigInteger.valueOf(ln - s * s) }; } BigInteger[] sr = isqrtInternal(n, n.bitLength() - 1); if (sr[1].signum() < 0) { return new BigInteger[]{ sr[0].subtract(ONE), sr[1].add(sr[0].shiftLeft(1)).subtract(ONE)}; } return sr; } /** * Calculate the Legendre symbol (a/p). This is defined only for p an odd positive prime, * and for such p it's identical to the Jacobi symbol. * * @param a * @param n * @return */ public static int legendre(BigInteger a, BigInteger n) { return jacobi(a, n); } public static int jacobi(BigInteger a, BigInteger n) { /* Precondition: a, n >= 0; n is odd */ /* int ans = 0; if (ZERO.equals(a)) ans = (ONE.equals(n)) ? 1 : 0; else if (TWO.equals(a)) { BigInteger mod = n.mod(EIGHT); if (ONE.equals(mod) || SEVEN.equals(mod)) ans = 1; else if (THREE.equals(mod) || FIVE.equals(mod)) ans = -1; } else if (a.compareTo(n) >= 0) ans = jacobi(a.mod(n), n); else if (ZERO.equals(a.mod(TWO))) ans = jacobi(TWO, n) * jacobi(a.divide(TWO), n); else ans = (THREE.equals(a.mod(FOUR)) && THREE.equals(n.mod(FOUR))) ? 
-jacobi(n, a) : jacobi(n, a); return ans;*/ if (ZERO.equals(a)) return 0; // (0/n) = 0 int ans = 1; BigInteger temp; if (a.compareTo(ZERO) == -1) { a = a.negate(); // (a/n) = (-a/n)*(-1/n) if (n.mod(FOUR).equals(THREE)) ans = -ans; // (-1/n) = -1 if n = 3 ( mod 4 ) } if (a.equals(ONE)) return ans; // (1/n) = 1 while (!ZERO.equals(a)) { if (a.compareTo(ZERO) == -1) { a = a.negate(); // (a/n) = (-a/n)*(-1/n) if (n.mod(FOUR).equals(THREE)) ans = -ans; // (-1/n) = -1 if n = 3 ( mod 4 ) } while (a.mod(TWO).equals(ZERO)) { a = a.divide(TWO); // Property (iii) BigInteger mod = n.mod(EIGHT); if (mod.equals(THREE) || mod.equals(FIVE)) ans = -ans; } // Property (iv) temp = a; a = n; n = temp; if (a.mod(FOUR).equals(THREE) && n.mod(FOUR).equals(THREE)) ans = -ans; // Property (iv) a = a.mod(n); // because (a/p) = (a%p / p ) and a%pi = (a%n)%pi if n % pi = 0 if (a.compareTo(n.divide(TWO)) == 1) a = a.subtract(n); } if (n.equals(ONE)) return ans; return 0; } public static int scanOne(BigInteger a, int startIndex) { for (int i = startIndex, size = a.bitLength(); i < size; i++) { if (a.testBit(i)) return i; } return -1; } public static BigInteger getRandom(BigInteger limit) { return getRandom(limit, new SecureRandom()); } public static BigInteger getRandom(BigInteger limit, SecureRandom random) { BigInteger result; do { result = new BigInteger(limit.bitLength(), random); } while (limit.compareTo(result) <= 0); return result; } public static BigInteger getRandom(int nbBits, SecureRandom random) { if (nbBits <= 1) return random.nextBoolean() ? BigInteger.ZERO : BigInteger.ONE; else return new BigInteger(nbBits, random).subtract(BigInteger.ONE.shiftLeft(nbBits - 1)); } /** * Compute trace of Frobenius at q^n given trace at q. * See p.105 of Blake, Seroussi and Smart. * * @param q * @param trace * @param n * @return */ public static BigInteger traceN(BigInteger q, BigInteger trace, int n) { BigInteger c2 = TWO; BigInteger c1 = trace; for (int i = 2; i <= n; i++) { BigInteger c0 = trace.multiply(c1); BigInteger t0 = q.multiply(c2); c0 = c0.subtract(t0); c2 = c1; c1 = c0; } return c1; } /* Compute the integer square root of n or a number which is too large by one Precondition: n >= 0 and 2^log2n <= n < 2^(log2n + 1), i.e. 
log2n = floor(log2(n)) Postcondition: Result sr has the property (sr[0]^2 - 1) <= n < (sr[0] + 1)^2 and (sr[0]^2 + sr[1] = n) */ private static BigInteger[] isqrtInternal(BigInteger n, int log2n) { if (n.compareTo(MAXINT) < 1) { int ln = n.intValue(), s = (int) java.lang.Math.sqrt(ln); return new BigInteger[]{BigInteger.valueOf(s), BigInteger.valueOf(ln - s * s)}; } if (n.compareTo(ITERBETTER) < 1) { int d = 7 * (log2n / 14 - 1), q = 7; BigInteger s = BigInteger.valueOf((long) java.lang.Math.sqrt(n.shiftRight(d << 1).intValue())); while (d > 0) { if (q > d) q = d; s = s.shiftLeft(q); d -= q; q <<= 1; s = s.add(n.shiftRight(d << 1).divide(s)).shiftRight(1); } return new BigInteger[]{s, n.subtract(s.multiply(s))}; } int log2b = log2n >> 2; BigInteger mask = ONE.shiftLeft(log2b).subtract(ONE); BigInteger[] sr = isqrtInternal(n.shiftRight(log2b << 1), log2n - (log2b << 1)); BigInteger s = sr[0]; BigInteger[] qu = sr[1].shiftLeft(log2b).add(n.shiftRight(log2b).and(mask)).divideAndRemainder(s.shiftLeft(1)); BigInteger q = qu[0]; return new BigInteger[]{s.shiftLeft(log2b).add(q), qu[1].shiftLeft(log2b).add(n.and(mask)).subtract(q.multiply(q))}; } public static int hammingWeight(BigInteger value) { int weight = 0; for (int i = 0; i <= value.bitLength(); i++) { if (value.testBit(i)) weight++; } return weight; } public static BigInteger modNear(BigInteger a, BigInteger b) { BigInteger res = a.mod(b); if (res.compareTo(b.shiftRight(1)) == 1) res = res.subtract(b); return res; } public static BigInteger mod(BigInteger a, BigInteger b) { BigInteger res = a.mod(b); return res; } /** * Divides `n` with primes up to `limit`. For each factor found, * call `fun`. If the callback returns nonzero, then aborts and returns 1. * Otherwise returns 0. */ public static abstract class TrialDivide { protected BigInteger limit; public TrialDivide(BigInteger limit) { this.limit = limit; } public int trialDivide(BigInteger n) { BigInteger m = n; BigInteger p = TWO; while (m.compareTo(BigInteger.ONE) != 0) { if (m.isProbablePrime(10)) p = m; if (limit != null && !limit.equals(BigInteger.ZERO) && p.compareTo(limit) > 0) p = m; if (isDivisible(m, p)) { int mul = 0; do { m = m.divide(p); mul++; } while (isDivisible(m, p)); if (fun(p, mul) != 0) return 1; } p = p.nextProbablePrime(); } return 0; } protected abstract int fun(BigInteger factor, int multiplicity); } }
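/*
 * Illustrative check only, not part of the library: verifies the windowed-NAF
 * representation produced by BigIntegerUtils.naf() above via the identity
 * n = sum(wnaf[i] * 2^i). Assumes BigIntegerUtils is importable from
 * it.unisa.dia.gas.plaf.jpbc.util.math; the demo class itself is hypothetical.
 */
import java.math.BigInteger;
import it.unisa.dia.gas.plaf.jpbc.util.math.BigIntegerUtils;

class NafReconstructionDemo {
    public static void main(String[] args) {
        BigInteger n = new BigInteger("123456789");
        byte[] wnaf = BigIntegerUtils.naf(n, (byte) 4); // width-4 NAF
        BigInteger sum = BigInteger.ZERO;
        for (int i = 0; i < wnaf.length; i++) {
            // Each digit is zero or odd, lying in [-2^(k-1), 2^(k-1) - 1].
            sum = sum.add(BigInteger.valueOf(wnaf[i]).shiftLeft(i));
        }
        System.out.println(sum.equals(n)); // expected: true
    }
}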
package com.gdgebolowa.watchlist.activity; import android.content.ActivityNotFoundException; import android.content.Intent; import android.content.pm.ActivityInfo; import android.net.Uri; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.util.DisplayMetrics; import android.view.MenuItem; import android.view.View; import android.widget.TextView; import com.android.volley.Request; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.JsonObjectRequest; import com.google.android.gms.analytics.HitBuilders; import com.google.android.gms.analytics.Tracker; import com.gdgebolowa.watchlist.R; import com.gdgebolowa.watchlist.Watchlist; import com.gdgebolowa.watchlist.adapter.VideoAdapter; import com.gdgebolowa.watchlist.adapter.VideoAdapter.OnVideoClickListener; import com.gdgebolowa.watchlist.model.Video; import com.gdgebolowa.watchlist.util.ApiHelper; import com.gdgebolowa.watchlist.util.VolleySingleton; import com.gdgebolowa.watchlist.util.YoutubeHelper; import com.gdgebolowa.watchlist.widget.ItemPaddingDecoration; import org.json.JSONArray; import org.json.JSONObject; import java.util.ArrayList; import butterknife.BindView; import butterknife.BindBool; import butterknife.ButterKnife; import butterknife.OnClick; public class VideoActivity extends AppCompatActivity implements OnVideoClickListener { private Tracker tracker; private String movieId; private VideoAdapter adapter; private boolean isLoading = false; @BindBool(R.bool.is_tablet) boolean isTablet; @BindView(R.id.toolbar) Toolbar toolbar; @BindView(R.id.toolbar_title) TextView toolbarTitle; @BindView(R.id.toolbar_subtitle) TextView toolbarSubtitle; @BindView(R.id.video_list) RecyclerView videoList; @BindView(R.id.error_message) View errorMessage; @BindView(R.id.progress_circle) View progressCircle; @BindView(R.id.no_results) View noResults; @BindView(R.id.no_results_message) TextView noResultsMessage; // Activity lifecycle @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_video); ButterKnife.bind(this); movieId = getIntent().getStringExtra(Watchlist.MOVIE_ID); String movieName = getIntent().getStringExtra(Watchlist.MOVIE_NAME); setSupportActionBar(toolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setTitle(""); toolbarTitle.setText(R.string.videos_title); toolbarSubtitle.setText(movieName); final int numOfColumns = getNumberOfColumns(); GridLayoutManager layoutManager = new GridLayoutManager(this, numOfColumns); layoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { @Override public int getSpanSize(int position) { if (position == adapter.videoList.size()) { return numOfColumns; } else { return 1; } } }); adapter = new VideoAdapter(this, new ArrayList<Video>(), this); videoList.setHasFixedSize(true); videoList.setLayoutManager(layoutManager); videoList.addItemDecoration(new ItemPaddingDecoration(this, R.dimen.dist_small)); videoList.setAdapter(adapter); if (savedInstanceState == null) { downloadVideosList(); } // Lock orientation for tablets if (isTablet) { setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); } // Load Analytics Tracker tracker = ((Watchlist) getApplication()).getTracker(); } @Override protected void onResume() { super.onResume(); // Send 
screen name to analytics tracker.setScreenName(getString(R.string.screen_movie_videos)); tracker.send(new HitBuilders.ScreenViewBuilder().build()); } @Override protected void onStop() { super.onStop(); VolleySingleton.getInstance(this).requestQueue.cancelAll(this.getClass().getName()); } // Save/restore state @Override public void onSaveInstanceState(Bundle outState) { if (adapter != null) { outState.putParcelableArrayList(Watchlist.VIDEO_LIST, adapter.videoList); outState.putBoolean(Watchlist.IS_LOADING, isLoading); } super.onSaveInstanceState(outState); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); adapter.videoList = savedInstanceState.getParcelableArrayList(Watchlist.VIDEO_LIST); isLoading = savedInstanceState.getBoolean(Watchlist.IS_LOADING); // If activity was previously downloading and it stopped, download again if (isLoading) { downloadVideosList(); } else { onDownloadSuccessful(); } } // Helper method public int getNumberOfColumns() { DisplayMetrics displayMetrics = getResources().getDisplayMetrics(); float widthPx = displayMetrics.widthPixels; float desiredPx = getResources().getDimensionPixelSize(R.dimen.video_item_width); int columns = Math.round(widthPx / desiredPx); if (columns <= 1) { return 1; } else { return columns; } } // Toolbar actions @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home) { finish(); return true; } else { return false; } } // JSON parsing and display private void downloadVideosList() { isLoading = true; if (adapter == null) { adapter = new VideoAdapter(this, new ArrayList<Video>(), this); videoList.setAdapter(adapter); } JsonObjectRequest request = new JsonObjectRequest( Request.Method.GET, ApiHelper.getVideosLink(this, movieId), null, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject object) { try { JSONArray results = object.getJSONArray("results"); for (int i = 0; i < results.length(); i++) { JSONObject vid = results.getJSONObject(i); if (vid.getString("site").equals("YouTube")) { String title = vid.getString("name"); String key = vid.getString("key"); String subtitle = vid.getString("size") + "p"; Video video = new Video(title, subtitle, key, YoutubeHelper.getThumbnailURL(key), YoutubeHelper.getVideoURL(key)); adapter.videoList.add(video); } } onDownloadSuccessful(); } catch (Exception ex) { onDownloadFailed(); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError volleyError) { onDownloadFailed(); } }); request.setTag(getClass().getName()); VolleySingleton.getInstance(this).requestQueue.add(request); } private void onDownloadSuccessful() { isLoading = false; if (adapter.videoList.size() == 0) { noResultsMessage.setText(R.string.videos_no_results); noResults.setVisibility(View.VISIBLE); errorMessage.setVisibility(View.GONE); progressCircle.setVisibility(View.GONE); videoList.setVisibility(View.GONE); } else { errorMessage.setVisibility(View.GONE); progressCircle.setVisibility(View.GONE); videoList.setVisibility(View.VISIBLE); adapter.notifyDataSetChanged(); } } private void onDownloadFailed() { isLoading = false; errorMessage.setVisibility(View.VISIBLE); progressCircle.setVisibility(View.GONE); videoList.setVisibility(View.GONE); } // Click events @OnClick(R.id.try_again) public void onTryAgainClicked() { videoList.setVisibility(View.GONE); errorMessage.setVisibility(View.GONE); progressCircle.setVisibility(View.VISIBLE); adapter = null; 
downloadVideosList(); } @Override public void onVideoClicked(int position) { try { Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("vnd.youtube:" + adapter.videoList.get(position).youtubeID)); startActivity(intent); } catch (ActivityNotFoundException ex) { Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.youtube.com/watch?v=" + adapter.videoList.get(position).youtubeID)); startActivity(intent); } } }
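/*
 * Illustrative sketch only, not part of the app: the "results" parsing that
 * downloadVideosList() performs above, run against a hard-coded payload with
 * org.json so it can be exercised off-device. The sample JSON is hypothetical
 * but follows the shape the activity expects (site/name/key/size fields).
 */
import org.json.JSONArray;
import org.json.JSONObject;

class VideoJsonParsingDemo {
    public static void main(String[] args) throws Exception {
        String payload = "{\"results\":[{\"site\":\"YouTube\",\"name\":\"Trailer\","
                + "\"key\":\"abc123\",\"size\":\"1080\"}]}";
        JSONObject object = new JSONObject(payload);
        JSONArray results = object.getJSONArray("results");
        for (int i = 0; i < results.length(); i++) {
            JSONObject vid = results.getJSONObject(i);
            if (vid.getString("site").equals("YouTube")) {
                // The same fields the activity maps into its Video model.
                System.out.println(vid.getString("name") + " -> key="
                        + vid.getString("key") + " @ " + vid.getString("size") + "p");
            }
        }
    }
}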
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.performancestatistics; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteCache; import org.apache.ignite.Ignition; import org.apache.ignite.cache.QueryEntity; import org.apache.ignite.cache.query.FieldsQueryCursor; import org.apache.ignite.cache.query.Query; import org.apache.ignite.cache.query.ScanQuery; import org.apache.ignite.cache.query.SqlFieldsQuery; import org.apache.ignite.client.Config; import org.apache.ignite.client.IgniteClient; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.ClientConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.processors.cache.query.GridCacheQueryType; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import static org.apache.ignite.cluster.ClusterState.ACTIVE; import static org.apache.ignite.cluster.ClusterState.INACTIVE; import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SCAN; import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SQL_FIELDS; import static org.apache.ignite.internal.processors.performancestatistics.AbstractPerformanceStatisticsTest.ClientType.CLIENT; import static org.apache.ignite.internal.processors.performancestatistics.AbstractPerformanceStatisticsTest.ClientType.SERVER; import static org.apache.ignite.internal.processors.performancestatistics.AbstractPerformanceStatisticsTest.ClientType.THIN_CLIENT; import static org.apache.ignite.internal.processors.query.QueryUtils.DFLT_SCHEMA; import static org.junit.Assume.assumeFalse; /** Tests query performance statistics. */ @RunWith(Parameterized.class) public class PerformanceStatisticsQueryTest extends AbstractPerformanceStatisticsTest { /** Cache entry count. */ private static final int ENTRY_COUNT = 100; /** Test cache 2 name. */ private static final String CACHE_2 = "cache2"; /** Test SQL table name. */ private static final String SQL_TABLE = "test"; /** Page size. */ @Parameterized.Parameter public int pageSize; /** Client type to run queries from. */ @Parameterized.Parameter(1) public ClientType clientType; /** @return Test parameters. 
*/ @Parameterized.Parameters(name = "pageSize={0}, clientType={1}") public static Collection<?> parameters() { List<Object[]> res = new ArrayList<>(); for (Integer pageSize : new Integer[] {ENTRY_COUNT, ENTRY_COUNT / 10}) { for (ClientType clientType : new ClientType[] {SERVER, CLIENT, THIN_CLIENT}) res.add(new Object[] {pageSize, clientType}); } return res; } /** Server. */ private static IgniteEx srv; /** Client. */ private static IgniteEx client; /** Thin client. */ private static IgniteClient thinClient; /** Cache. */ private static IgniteCache<Integer, Integer> cache; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); cfg.setDataStorageConfiguration(new DataStorageConfiguration() .setDefaultDataRegionConfiguration( new DataRegionConfiguration() .setPersistenceEnabled(true))); return cfg; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { super.beforeTestsStarted(); stopAllGrids(); cleanPersistenceDir(); srv = startGrids(2); thinClient = Ignition.startClient(new ClientConfiguration().setAddresses(Config.SERVER)); client = startClientGrid("client"); client.cluster().state(ACTIVE); cache = client.getOrCreateCache(new CacheConfiguration<Integer, Integer>() .setName(DEFAULT_CACHE_NAME) .setSqlSchema(DFLT_SCHEMA) .setQueryEntities(Collections.singletonList( new QueryEntity(Integer.class, Integer.class) .setTableName(DEFAULT_CACHE_NAME))) ); IgniteCache<Object, Object> cache2 = client.getOrCreateCache(new CacheConfiguration<>() .setName(CACHE_2) .setSqlSchema(DFLT_SCHEMA) .setQueryEntities(Collections.singletonList( new QueryEntity(Long.class, Long.class) .setTableName(CACHE_2))) ); for (int i = 0; i < ENTRY_COUNT; i++) { cache.put(i, i); cache2.put(i, i * 2); } } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { super.afterTestsStopped(); cleanPersistenceDir(); thinClient.close(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); if (cache != null) cache.query(new SqlFieldsQuery("drop table if exists " + SQL_TABLE)); } /** @throws Exception If failed. */ @Test public void testScanQuery() throws Exception { ScanQuery<Object, Object> qry = new ScanQuery<>().setPageSize(pageSize); checkQuery(SCAN, qry, DEFAULT_CACHE_NAME); } /** @throws Exception If failed. */ @Test public void testSqlFieldsQuery() throws Exception { String sql = "select * from " + DEFAULT_CACHE_NAME; SqlFieldsQuery qry = new SqlFieldsQuery(sql).setPageSize(pageSize); checkQuery(SQL_FIELDS, qry, sql); } /** @throws Exception If failed. */ @Test public void testSqlFieldsJoinQuery() throws Exception { String sql = "select * from " + DEFAULT_CACHE_NAME + " a inner join " + CACHE_2 + " b on a._key = b._key"; SqlFieldsQuery qry = new SqlFieldsQuery(sql).setPageSize(pageSize); checkQuery(SQL_FIELDS, qry, sql); } /** Check query. */ private void checkQuery(GridCacheQueryType type, Query<?> qry, String text) throws Exception { client.cluster().state(INACTIVE); client.cluster().state(ACTIVE); runQueryAndCheck(type, qry, text, true, true); runQueryAndCheck(type, qry, text, true, false); } /** @throws Exception If failed. 
*/ @Test public void testDdlAndDmlQueries() throws Exception { String sql = "create table " + SQL_TABLE + " (id int, val varchar, primary key (id))"; runQueryAndCheck(SQL_FIELDS, new SqlFieldsQuery(sql), sql, false, false); sql = "insert into " + SQL_TABLE + " (id) values (1)"; runQueryAndCheck(SQL_FIELDS, new SqlFieldsQuery(sql), sql, false, false); sql = "update " + SQL_TABLE + " set val = 'abc'"; runQueryAndCheck(SQL_FIELDS, new SqlFieldsQuery(sql), sql, true, false); } /** Runs query and checks statistics. */ private void runQueryAndCheck(GridCacheQueryType expType, Query<?> qry, String expText, boolean hasLogicalReads, boolean hasPhysicalReads) throws Exception { long startTime = U.currentTimeMillis(); cleanPerformanceStatisticsDir(); startCollectStatistics(); Collection<UUID> expNodeIds = new ArrayList<>(); if (clientType == SERVER) { srv.cache(DEFAULT_CACHE_NAME).query(qry).getAll(); expNodeIds.add(srv.localNode().id()); } else if (clientType == CLIENT) { client.cache(DEFAULT_CACHE_NAME).query(qry).getAll(); expNodeIds.add(client.localNode().id()); } else if (clientType == THIN_CLIENT) { thinClient.cache(DEFAULT_CACHE_NAME).query(qry).getAll(); expNodeIds.addAll(F.nodeIds(client.cluster().forServers().nodes())); } Set<UUID> readsNodes = new HashSet<>(); if (hasLogicalReads) srv.cluster().forServers().nodes().forEach(node -> readsNodes.add(node.id())); AtomicInteger queryCnt = new AtomicInteger(); AtomicInteger readsCnt = new AtomicInteger(); HashSet<Long> qryIds = new HashSet<>(); stopCollectStatisticsAndRead(new TestHandler() { @Override public void query(UUID nodeId, GridCacheQueryType type, String text, long id, long queryStartTime, long duration, boolean success) { queryCnt.incrementAndGet(); qryIds.add(id); assertTrue(expNodeIds.contains(nodeId)); assertEquals(expType, type); assertEquals(expText, text); assertTrue(queryStartTime >= startTime); assertTrue(duration >= 0); assertTrue(success); } @Override public void queryReads(UUID nodeId, GridCacheQueryType type, UUID queryNodeId, long id, long logicalReads, long physicalReads) { readsCnt.incrementAndGet(); qryIds.add(id); readsNodes.remove(nodeId); assertTrue(expNodeIds.contains(queryNodeId)); assertEquals(expType, type); assertTrue(logicalReads > 0); assertTrue(hasPhysicalReads ? physicalReads > 0 : physicalReads == 0); } }); assertEquals(1, queryCnt.get()); assertTrue("Query reads expected on nodes: " + readsNodes, readsNodes.isEmpty()); assertEquals(1, qryIds.size()); } /** @throws Exception If failed. */ @Test public void testMultipleStatementsSql() throws Exception { assumeFalse("Multiple statements queries are not supported by thin client.", clientType == THIN_CLIENT); long startTime = U.currentTimeMillis(); LinkedList<String> expQrs = new LinkedList<>(); expQrs.add("create table " + SQL_TABLE + " (id int primary key, val varchar)"); expQrs.add("insert into " + SQL_TABLE + " (id, val) values (1, 'a')"); expQrs.add("insert into " + SQL_TABLE + " (id, val) values (2, 'b'), (3, 'c')"); LinkedList<String> qrsWithReads = new LinkedList<>(); qrsWithReads.add("update " + SQL_TABLE + " set val = 'd' where id = 1"); qrsWithReads.add("select * from " + SQL_TABLE); expQrs.addAll(qrsWithReads); startCollectStatistics(); SqlFieldsQuery qry = new SqlFieldsQuery(F.concat(expQrs, ";")); IgniteEx loadNode = this.clientType == SERVER ? 
srv : client; List<FieldsQueryCursor<List<?>>> res = loadNode.context().query().querySqlFields(qry, true, false); assertEquals("Unexpected cursors count: " + res.size(), expQrs.size(), res.size()); res.get(4).getAll(); HashSet<Long> qryIds = new HashSet<>(); stopCollectStatisticsAndRead(new TestHandler() { @Override public void query(UUID nodeId, GridCacheQueryType type, String text, long id, long queryStartTime, long duration, boolean success) { if (qrsWithReads.contains(text)) qryIds.add(id); assertEquals(loadNode.localNode().id(), nodeId); assertEquals(SQL_FIELDS, type); assertTrue("Unexpected query: " + text, expQrs.remove(text)); assertTrue(queryStartTime >= startTime); assertTrue(duration >= 0); assertTrue(success); } @Override public void queryReads(UUID nodeId, GridCacheQueryType type, UUID queryNodeId, long id, long logicalReads, long physicalReads) { qryIds.add(id); assertEquals(SQL_FIELDS, type); assertEquals(loadNode.localNode().id(), queryNodeId); assertTrue(logicalReads > 0); assertEquals(0, physicalReads); } }); assertTrue("Queries were not handled: " + expQrs, expQrs.isEmpty()); assertEquals("Unexpected IDs: " + qryIds, qrsWithReads.size(), qryIds.size()); } }
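/*
 * Illustrative sketch only, not from the Ignite sources: the JUnit 4
 * Parameterized runner pattern the test above uses, reduced to a standalone
 * example. Each Object[] returned by parameters() becomes one test run, with
 * its elements injected into the @Parameterized.Parameter fields by index.
 */
import java.util.Arrays;
import java.util.Collection;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class ParameterizedRunnerDemo {
    @Parameterized.Parameter
    public int pageSize;

    @Parameterized.Parameter(1)
    public String clientType;

    @Parameterized.Parameters(name = "pageSize={0}, clientType={1}")
    public static Collection<Object[]> parameters() {
        return Arrays.asList(new Object[][] {
            {100, "SERVER"}, {10, "THIN_CLIENT"}
        });
    }

    @Test
    public void parametersAreInjected() {
        Assert.assertTrue(pageSize > 0);
        Assert.assertNotNull(clientType);
    }
}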
package gmtools.snaptracks; import gmtools.common.ArrayTools; import gmtools.common.Geography; import gmtools.common.KMLUtils; import gmtools.common.Sets; import gmtools.parsers.ColumnIndices; import gmtools.parsers.RawFlightTrackData; import gmtools.tools.SnapTracks; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import uk.me.jstott.jcoord.LatLng; import de.micromata.opengis.kml.v_2_2_0.Document; import de.micromata.opengis.kml.v_2_2_0.Kml; import de.micromata.opengis.kml.v_2_2_0.KmlFactory; import de.micromata.opengis.kml.v_2_2_0.LineString; import de.micromata.opengis.kml.v_2_2_0.Style; /** * copyright (c) 2014-2015 Alexander E.I. Brownlee ([email protected]) * Released under the MIT Licence http://opensource.org/licenses/MIT * Instructions, citation information, licencing and source * are available at https://github.com/gm-tools/gm-tools/ */ public class CleaningRawDataOutliers { /** * Originally conceived as a separate tool, this is now called from SnapTracks * usage: CleaningRawDataOutliers inFile outFile [args...] * args: * -m=0.8 : maximum fraction of points in a track that we'll try to fix (more than this and we'll give up) (default=0.8) * -M=15 : maximum number of points in a track that we'll try to fix (more than this and we'll give up) (default=unlimited) (over 30 seems very slow - an hour or two to process the track, plus several GB of heap needed to store the intermediate results) * -d=n : max distance from airport in km (default is 10) * -alat=x : lat of airport * -alon=y : lon of airport * Not yet implemented: -speeds=a:b,c:d,e:f... : pairs of values defining speed limits for turns.
These are MaxTurningAngleDegrees:MaxSpeedMperS */ public static void main(String[] args) { // first, read file List<List<PointInTrack>> flightTracksOriginal = new ArrayList<List<PointInTrack>>(); List<List<PointInTrack>> flightTracksUpdated = new ArrayList<List<PointInTrack>>(); List<String> flightTracksComments = new ArrayList<String>(); String fileNameIn = null; String fileNameOut = null; int elementsPerPointInTrack = 4;// currently 4 for historical data, 3 for live (which is missing timings so can't be used here anyway) int maxBadPoints = Integer.MAX_VALUE; // might want to limit this for speed (if over 30 then it gets very slow) double maxFractionBad = 0.8; // maximum fraction of points allowed to be bad before giving up boolean debug = SnapTracks.GLOBAL_DEBUG_CLEANING; double latAirport = Double.NaN; double lonAirport = Double.NaN; double maxDistanceFromAirportInKM = 10; double maxAltitudeInM = 2000; // points with altitude higher than this will be omitted from the output if (args.length < 2) { System.err.println("usage: CleaningRawDataOutliers inFile outFile [args...]"); System.exit(1); } fileNameIn = args[0]; fileNameOut = args[1]; boolean argsOK = true; for (int i = 2; i < args.length; i++) { String a = args[i]; try { if (a.startsWith("-alat=")) { latAirport = Double.parseDouble(a.substring(6)); } else if (a.startsWith("-alon=")) { lonAirport = Double.parseDouble(a.substring(6)); } else if (a.startsWith("-d=")) { maxDistanceFromAirportInKM = Double.parseDouble(a.substring(3)); } else if (a.startsWith("-M=")) { maxBadPoints = Integer.parseInt(a.substring(3)); } else if (a.startsWith("-m=")) { maxFractionBad = Double.parseDouble(a.substring(3)); } } catch (Exception e) { System.err.println("Error parsing argument " + a); e.printStackTrace(); argsOK = false; } } if (Double.isNaN(latAirport) || Double.isNaN(lonAirport)) { System.err.println("Please specify airport coordinates using -alat and -alon"); System.exit(1); } if (!argsOK) { System.err.println("Exiting."); System.exit(1); } System.out.println("Cleaning with params:"); System.out.println(" Input file: " + fileNameIn); System.out.println(" Output file: " + fileNameOut); System.out.println(" Airport lat, lon: " + latAirport + ", " + lonAirport); System.out.println(" Max distance from airport KM: " + maxDistanceFromAirportInKM); System.out.println(" Max bad points: " + maxBadPoints); System.out.println(" Max fraction bad: " + maxFractionBad); System.out.println(); LatLng llMan = new LatLng(latAirport, lonAirport); int countRaw = 0; int countCleaned = 0; int countPointsRemoteToAirportRemoved = 0; int countUncleanable = 0; int countGoodTracks = 0; int countEmptyTracks = 0; int countTotalInOutput = 0; try { BufferedReader in = new BufferedReader(new FileReader(fileNameIn)); PrintStream out = new PrintStream(new FileOutputStream(fileNameOut)); String line = in.readLine(); out.println(line); // copy header to output file String[] header = line.split(RawFlightTrackData.SEPARATOR); ColumnIndices columnIndices = new ColumnIndices(header, fileNameIn); while ((line = in.readLine()) != null) { countRaw++; //debug = (flightTracksOriginal.size() == 1488); if (debug) { System.out.print("AC" + flightTracksOriginal.size()); System.out.flush(); } String[] cols = line.split(RawFlightTrackData.SEPARATOR); String id = cols[columnIndices.getColumnIndex(RawFlightTrackData.HEADER_ID, false)]; id = id != null ? 
id : "Track" + countRaw; if (debug) System.out.println(" (" + id + ")"); boolean trackGood; String[] track = cols[columnIndices.getColumnIndex(RawFlightTrackData.HEADER_TRACK, true)].split(RawFlightTrackData.SEPARATOR_COORDS); List<PointInTrack> trackOriginal = new ArrayList<PointInTrack>(track.length / elementsPerPointInTrack); List<PointInTrack> trackUpdated = new ArrayList<PointInTrack>(track.length / elementsPerPointInTrack); flightTracksOriginal.add(trackOriginal); flightTracksUpdated.add(trackUpdated); String comment; if (track.length >= elementsPerPointInTrack) { boolean rawTrackUnaltered = true; for (int i = 0; i < track.length; i+=elementsPerPointInTrack) { double altitude = Double.parseDouble(track[i+2]); if (altitude < maxAltitudeInM) { // used to just look for zero altitude - no longer checked - we want the altitude to distinguish separate visits by the same aircraft LatLng point = new LatLng(Double.parseDouble(track[i]), Double.parseDouble(track[i+1])); double time = -1 * Double.parseDouble(track[i+3]); PointInTrack pit = new PointInTrack(point, time, altitude); if (Geography.distance(point, llMan) < (maxDistanceFromAirportInKM * 1000.0)) { // within 10km of airport (this means we omit data for GM at other airports) trackOriginal.add(pit); } else { // end of check for distance from airport rawTrackUnaltered = false; // we've dropped some points } } // end of check for altitude } // end of loop over elements in track if (!rawTrackUnaltered) { countPointsRemoteToAirportRemoved++; } // step 1-6. ExtremePointDecisionCache cache = new ExtremePointDecisionCache(); List<Integer> badPointsInOriginal = getIndicesWithBadSpeedAndAngle(trackOriginal, cache, debug); if (badPointsInOriginal.isEmpty()) { if (debug) System.out.println("All good"); comment = "good-no cleaning"; trackUpdated.addAll(trackOriginal); countGoodTracks++; trackGood = true; } else { if (debug) System.out.println("AC" + (flightTracksOriginal.size()-1) + " (" + id + ")" + " bad points:" + ArrayTools.toString(badPointsInOriginal.toArray())); trackGood = false; // step 7 onward. // now, we'll try removing the bad points and see which removals are needed to make a route // that complies with the speed/angle limits // we slowly increase the number of points being removed to avoid removing any unnecessarily boolean done = false; List<Integer> badPoints = new ArrayList<Integer>(badPointsInOriginal); List<PointInTrack> badTrack = new ArrayList<PointInTrack>(trackOriginal); while (!done && !badPoints.isEmpty() && (badPoints.size() < Math.min(maxBadPoints, (badTrack.size() * maxFractionBad)))) { // stop if we have found a valid track, or if there are too many bad points (>80% of points are bad) if (debug) System.out.println("AC" + (flightTracksOriginal.size()-1) + " (" + id + ")" + " still has " + badPoints.size() + " bad points out of " + badTrack.size()); for (int numberToRemove = 1; !done && (numberToRemove <= badPoints.size()); numberToRemove++) { if (debug) System.out.println(numberToRemove + "/" + badPoints.size()); // get the possible indices to remove int[][] allToRemove = Sets.nChooseKSets(badPoints.size(), numberToRemove); for (int removalIndex = 0; !done && (removalIndex < allToRemove.length); removalIndex++) { int[] toRemove = allToRemove[removalIndex]; // these will be indices in to the badPoints list // step 13. 
generate a new track with the appropriate points removed if (debug) System.out.print("removing:"); int currentIndexInPointsToRemove = 0; for (int i = 0; i < badTrack.size(); i++) { int indexToRemove = badPoints.get(toRemove[currentIndexInPointsToRemove]); if (indexToRemove == i) { // done this way for speed, as toRemove will be in ascending order if (debug) System.out.print(" " + i); // if we've dropped a point, add to the time interval for the next point so the speed in the gap is still right // ("next point" in time is actually previous point in the list in FR24 format) if (trackUpdated.size() > 0) { // don't do this for the last point in time (ie first point in the list) trackUpdated.get(trackUpdated.size() - 1).timeSinceLastPoint += badTrack.get(i).timeSinceLastPoint; } // doesn't matter if it was the last point (first point in the list) as there is no following point to update the time for currentIndexInPointsToRemove = Math.min(toRemove.length-1, currentIndexInPointsToRemove+1); } else { trackUpdated.add(badTrack.get(i).copyOf()); // need a copy to get the original interval back } } if (debug) System.out.println(); // step 14. test it List<Integer> badPointsUpdated = getIndicesWithBadSpeedAndAngle(trackUpdated, cache, debug); // steps 15-16. if it's all ok, keep it if (badPointsUpdated.isEmpty()) { done = true; } else { // otherwise, clear the updated track and try again; on the last iteration (all bad points removed) keep the result as the new baseline, updating the route and the list of bad points for the next attempt if (numberToRemove == badPoints.size()) { badPoints = badPointsUpdated; badTrack = new ArrayList<PointInTrack>(trackUpdated); } trackUpdated.clear(); } } // end of loop over possible points to remove } // end of loop over increasing number of points to remove } // end of bad point fixing loop if (done) { if (debug) System.out.println("FIXED"); comment = "fixed"; countCleaned++; trackGood = true; } else { if (debug) System.out.println("NO FIXES FOUND"); countUncleanable++; comment = "no fix found"; } } // end of bad point fixing } else { // end of check for elements in track countEmptyTracks++; trackGood = false; comment = "too few elements in track"; } comment = "(" + id + ") " + comment; flightTracksComments.add(comment); if (trackGood) { cols[columnIndices.getColumnIndex(RawFlightTrackData.HEADER_TRACK, true)] = trackToCoordsString(trackUpdated); countTotalInOutput++; out.println(ArrayTools.toString(cols, RawFlightTrackData.SEPARATOR)); } } // end of loop over file in.close(); out.close(); tracksToKML(fileNameOut + "_Cleaned.kml", flightTracksOriginal, flightTracksUpdated, flightTracksComments); System.out.println("Cleaning done."); System.out.println("Raw tracks read:" + countRaw); System.out.println("Empty tracks discarded:" + countEmptyTracks); System.out.println("Tracks amended to remove points distant from airport:" + countPointsRemoteToAirportRemoved); System.out.println("Tracks discarded as uncleanable:" + countUncleanable); System.out.println("Tracks not needing cleaning:" + countGoodTracks); System.out.println("Tracks successfully cleaned:" + countCleaned); System.out.println("Tracks written to output:" + countTotalInOutput); } catch (IOException e) { e.printStackTrace(); } } private static String trackToCoordsString(List<PointInTrack> track) { StringBuffer buf = new StringBuffer(); boolean first = true; for (PointInTrack p : track) { if (first) { first = false; } else { buf.append(RawFlightTrackData.SEPARATOR_COORDS); }
buf.append(p.getLatLng().getLat()); buf.append(RawFlightTrackData.SEPARATOR_COORDS); buf.append(p.getLatLng().getLng()); buf.append(RawFlightTrackData.SEPARATOR_COORDS); buf.append(p.getAltitude()); // altitude buf.append(RawFlightTrackData.SEPARATOR_COORDS); buf.append(-1 * p.getTimeSinceLastPoint()); // convert back to negatives for consistency with original data } return buf.toString(); } /**assumes that points are in FR24 order - latest first, going back in time, with timings on each point being time from previous point (so the next point in the list)*/ private static List<Integer> getIndicesWithBadSpeedAndAngle(List<PointInTrack> points, ExtremePointDecisionCache cache, boolean debug) { List<Integer> rval = new ArrayList<Integer>(); if (points.size() < 3) { // not much to say if there are this few points return rval; } // check first element if ((points.get(0).altitude == 0) && endPointTooExtreme(points.get(0).getLatLng(), points.get(1).getLatLng(), points.get(2).getLatLng(), debug)) { rval.add(0); } for (int i = 1; i < points.size() - 1; i++) { PointInTrack prev = points.get(i - 1); PointInTrack cur = points.get(i); PointInTrack next = points.get(i + 1); if ((prev.getAltitude() == 0) && (cur.getAltitude() == 0) && (next.getAltitude() == 0)) { // only perform checks on points with zero altitude (landing/taking off ACs will be going much faster) Boolean decision = debug ? null : cache.getDecision(prev, cur, next); if (decision == null) { if (debug) System.out.print(i+":"); decision = angleAndSpeedAroundPointTooExtreme(prev.getLatLng(), cur.getLatLng(), next.getLatLng(), prev.getTimeSinceLastPoint(), cur.getTimeSinceLastPoint(), debug); cache.addDecision(prev, cur, next, decision.booleanValue()); } if (decision.booleanValue()) { rval.add(i); } } } // check last element if ((points.get(points.size() - 1).altitude == 0) && endPointTooExtreme(points.get(points.size() - 1).getLatLng(), points.get(points.size() - 2).getLatLng(), points.get(points.size() - 3).getLatLng(), debug)) { rval.add(points.size() - 1); } return rval; } /** * perform tests on a point given its neighbours in the path * currently uses fixed values for tests, noted in paper */ private static boolean angleAndSpeedAroundPointTooExtreme(LatLng prev, LatLng point, LatLng next, double timePrevToPoint, double timePointToNext, boolean debug) { double angle = Geography.angleBetweenPoints(prev, point, next); double distancePrevPoint = Geography.distance(prev, point); double distancePointNext = Geography.distance(point, next); double distancePrevNext = Geography.distance(prev, next); double speed = (distancePrevPoint + distancePointNext) / (timePrevToPoint + timePointToNext); // if two of the points are overlaid, then the angle calc is meaningless - reset to zero (no turning) if ((distancePrevPoint == 0) || (distancePointNext == 0)) { angle = 0; } // angle / speed tests // sometime it would be nice to parameterise these boolean testA1Fails = (angle > 90) && (speed > 30); boolean testA2Fails = (angle > 60) && (speed > 50); // needs to be higher speed to still keep runway turnoffs boolean testA3Fails = (angle > 150) && (speed > 10); // anything this sharp needs to be slow boolean testA4Fails = (angle > 120) && (speed > 20); // anything this sharp needs to be slow boolean testA5Fails = (angle > 130) && (speed > 16.7); boolean testA6Fails = (angle > 140) && (speed > 13.3); // distance tests boolean testD1Fails = (distancePrevPoint > 100) && (distancePrevPoint > (5 * distancePrevNext)) && (distancePointNext > (5 * distancePrevNext)); // 
catch massive deviations. if two points are near each other, and the point between them is very far away, it can be dropped (only do this for larger jumps, ie >10m/100m) boolean testD2Fails = (((distancePrevPoint > 250) && (distancePointNext < 10)) || ((distancePrevPoint < 10) && (distancePointNext > 250))); // catch massive deviations. if this point is right next to one neighbour (within 10m) but over 250m from the other, then this is probably an outlier too if (debug) System.out.println("AS " + prev + "," + point + "," + next + "," + angle + "," + speed + "," + testA1Fails + "," + testA2Fails + "," + testA3Fails + "," + testA4Fails + "," + testA5Fails + "," + testA6Fails + "," + testD1Fails + "," + testD2Fails); return testA1Fails || testA2Fails || testA3Fails || testA4Fails || testA5Fails || testA6Fails || testD1Fails || testD2Fails; } /**only checks the distance to the nearest neighbour relative to the distance between the next two points inward*/ private static boolean endPointTooExtreme(LatLng point, LatLng point1, LatLng point2, boolean debug) { double distancePointTo1 = Geography.distance(point, point1); double distance1To2 = Geography.distance(point1, point2); boolean testD1Fails = (distancePointTo1 > 100) && (distancePointTo1 > (20 * distance1To2)); // catch massive deviations if (debug) System.out.println("EP " + point + "," + point1 + "," + point2 + "," + distancePointTo1 + "," + distance1To2 + "," + testD1Fails); return testD1Fails; } private static void tracksToKML(String filename, List<List<PointInTrack>> originalRoutes, List<List<PointInTrack>> updatedRoutes, List<String> comments) { final Kml kml = KmlFactory.createKml(); final Document document = kml.createAndSetDocument().withName(filename).withOpen(true); Document[] documents = new Document[originalRoutes.size()]; for (int i = 0; i < documents.length; i++) { documents[i] = document.createAndAddDocument().withName(i + "_" + comments.get(i)).withOpen(false); } final Style styleOriginal = document.createAndAddStyle().withId("linestyleOriginal"); styleOriginal.createAndSetLineStyle().withColor("ff0000ff").withWidth(4.0d); final Style styleUpdated = document.createAndAddStyle().withId("linestyleUpdated"); styleUpdated.createAndSetLineStyle().withColor("ffff0000").withWidth(4.0d); for (int i = 0; i < documents.length; i++) { LineString lsO = documents[i].createAndAddPlacemark().withName("O").withStyleUrl("#linestyleOriginal").withVisibility(false).createAndSetLineString(); //lsO.setAltitudeMode(AltitudeMode.RELATIVE_TO_GROUND); for (PointInTrack pip : originalRoutes.get(i)) { if (pip.getAltitude() == 0) { LatLng ll = pip.getLatLng(); lsO.addToCoordinates(ll.getLng() + "," + ll.getLat() + ",0.0");// + pip.getAltitude()); // dropped altitude stuff as the track was sometimes hidden by small bumps in terrain } } LineString lsU = documents[i].createAndAddPlacemark().withName("U").withStyleUrl("#linestyleUpdated").withVisibility(false).createAndSetLineString(); for (PointInTrack pip : updatedRoutes.get(i)) { if (pip.getAltitude() == 0) { LatLng ll = pip.getLatLng(); lsU.addToCoordinates(ll.getLng() + "," + ll.getLat() + ",0.0");// + pip.getAltitude()); } } } KMLUtils.addGroundOverlayToKMLDocument(filename, document); try { if (kml.marshal(new File(filename))) { System.out.println(filename + " written successfully"); } else { System.out.println(filename + " not written"); } } catch (IOException e) { e.printStackTrace(); } } // this could be made far more efficient if we just used indices into the original track rather than object lists // that way the cache could be 2 3D arrays of
booleans; one for whether the test result is in the cache, one for test results private static class ExtremePointDecisionCache { // outer map is prev point, middle map is current point, inner map is next point private Map<PointInTrack, Map<PointInTrack, Map<PointInTrack, Boolean>>> cache; public ExtremePointDecisionCache() { this.cache = new HashMap<PointInTrack, Map<PointInTrack, Map<PointInTrack, Boolean>>>(); } /**decision is null if cache miss, true/false otherwise*/ public Boolean getDecision(PointInTrack prev, PointInTrack current, PointInTrack next) { Map<PointInTrack, Map<PointInTrack, Boolean>> m1 = this.cache.get(prev); if (m1 != null) { Map<PointInTrack, Boolean> m2 = m1.get(current); if (m2 != null) { Boolean b = m2.get(next); return b; } } return null; } public void addDecision(PointInTrack prev, PointInTrack current, PointInTrack next, boolean decision) { Map<PointInTrack, Map<PointInTrack, Boolean>> m1 = this.cache.get(prev); if (m1 == null) { m1 = new HashMap<PointInTrack, Map<PointInTrack, Boolean>>(); this.cache.put(prev, m1); } Map<PointInTrack, Boolean> m2 = m1.get(current); if (m2 == null) { m2 = new HashMap<PointInTrack, Boolean>(); m1.put(current, m2); } m2.put(next, Boolean.valueOf(decision)); } } private static class PointInTrack { private LatLng latLng; private double timeSinceLastPoint; private double altitude; public PointInTrack(LatLng latLng, double timeSinceLastPoint, double altitude) { this.latLng = latLng; this.timeSinceLastPoint = timeSinceLastPoint; this.altitude = altitude; } public LatLng getLatLng() { return latLng; } public double getTimeSinceLastPoint() { return timeSinceLastPoint; } public double getAltitude() { return altitude; } public PointInTrack copyOf() { return new PointInTrack(latLng, timeSinceLastPoint, altitude); } @Override public String toString() { StringBuffer buf = new StringBuffer(); buf.append("PIT["); buf.append(latLng.getLat()); buf.append(","); buf.append(latLng.getLng()); buf.append(","); buf.append(timeSinceLastPoint); buf.append(","); buf.append(altitude); buf.append("]"); return buf.toString(); } } }
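// --- Added sketch, not part of the original file ---
// The comment above ExtremePointDecisionCache suggests replacing the nested
// HashMaps with two 3D boolean arrays indexed by point positions in the
// original track. A minimal sketch of that idea, assuming the track indices
// stay stable while the cache is in use; the class name and constructor
// parameter here are hypothetical.
class IndexedExtremePointDecisionCache {
    private final boolean[][][] cached;   // true if a decision for (prev, cur, next) is stored
    private final boolean[][][] decision; // the stored decision itself

    IndexedExtremePointDecisionCache(int trackLength) {
        this.cached = new boolean[trackLength][trackLength][trackLength];
        this.decision = new boolean[trackLength][trackLength][trackLength];
    }

    /** null on cache miss, otherwise the cached decision */
    Boolean getDecision(int prev, int cur, int next) {
        return cached[prev][cur][next] ? Boolean.valueOf(decision[prev][cur][next]) : null;
    }

    void addDecision(int prev, int cur, int next, boolean d) {
        cached[prev][cur][next] = true;
        decision[prev][cur][next] = d;
    }
}
// Note the O(n^3) memory cost of this layout: it only pays off for short
// tracks; for long tracks a single map keyed on a packed (prev, cur, next)
// long would be a better trade-off.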
/* * Copyright 2020 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.spinnaker.clouddriver.kubernetes.description.manifest; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; import com.google.common.collect.ImmutableMap; import com.netflix.spinnaker.clouddriver.kubernetes.description.manifest.KubernetesManifestStrategy.DeployStrategy; import com.netflix.spinnaker.clouddriver.kubernetes.description.manifest.KubernetesManifestStrategy.Versioned; import java.util.HashMap; import java.util.Map; import java.util.OptionalInt; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; @RunWith(JUnitPlatform.class) final class KubernetesManifestStrategyTest { @Test void deployStrategyDefaultsToApply() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations(ImmutableMap.of()); assertThat(strategy).isEqualTo(DeployStrategy.APPLY); } @Test void otherStrategiesFalse() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations( ImmutableMap.of( "strategy.spinnaker.io/recreate", "false", "strategy.spinnaker.io/replace", "false")); assertThat(strategy).isEqualTo(DeployStrategy.APPLY); } @Test void recreateStrategy() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/recreate", "true")); assertThat(strategy).isEqualTo(DeployStrategy.RECREATE); } @Test void replaceStrategy() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/replace", "true")); assertThat(strategy).isEqualTo(DeployStrategy.REPLACE); } @Test void nonBooleanValue() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/replace", "zzzz")); assertThat(strategy).isEqualTo(DeployStrategy.APPLY); } @Test void recreatePreferredOverReplace() { KubernetesManifestStrategy.DeployStrategy strategy = KubernetesManifestStrategy.DeployStrategy.fromAnnotations( ImmutableMap.of( "strategy.spinnaker.io/replace", "true", "strategy.spinnaker.io/recreate", "true")); assertThat(strategy).isEqualTo(DeployStrategy.RECREATE); } @Test void applyToAnnotations() { Map<String, String> annotations = DeployStrategy.APPLY.toAnnotations(); assertThat(annotations).isEmpty(); } @Test void recreateToAnnotations() { Map<String, String> annotations = DeployStrategy.RECREATE.toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/recreate", "true")); } @Test void replaceToAnnotations() { Map<String, String> annotations = DeployStrategy.REPLACE.toAnnotations(); 
assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/replace", "true")); } @Test void versionedDefaultsToDefault() { KubernetesManifestStrategy.Versioned versioned = KubernetesManifestStrategy.Versioned.fromAnnotations(ImmutableMap.of()); assertThat(versioned).isEqualTo(Versioned.DEFAULT); } @Test void versionedTrue() { KubernetesManifestStrategy.Versioned versioned = KubernetesManifestStrategy.Versioned.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/versioned", "true")); assertThat(versioned).isEqualTo(Versioned.TRUE); } @Test void versionedFalse() { KubernetesManifestStrategy.Versioned versioned = KubernetesManifestStrategy.Versioned.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/versioned", "false")); assertThat(versioned).isEqualTo(Versioned.FALSE); } @Test void versionedNonsense() { KubernetesManifestStrategy.Versioned versioned = KubernetesManifestStrategy.Versioned.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/versioned", "zzz")); assertThat(versioned).isEqualTo(Versioned.FALSE); } @Test void versionedDefaultToAnnotations() { Map<String, String> annotations = Versioned.DEFAULT.toAnnotations(); assertThat(annotations).isEmpty(); } @Test void versionedTrueToAnnotations() { Map<String, String> annotations = Versioned.TRUE.toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/versioned", "true")); } @Test void versionedFalseToAnnotations() { Map<String, String> annotations = Versioned.FALSE.toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/versioned", "false")); } @Test void fromEmptyAnnotations() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations(ImmutableMap.of()); assertThat(strategy.getDeployStrategy()).isEqualTo(DeployStrategy.APPLY); assertThat(strategy.getVersioned()).isEqualTo(Versioned.DEFAULT); assertThat(strategy.getMaxVersionHistory()).isEqualTo(OptionalInt.empty()); assertThat(strategy.isUseSourceCapacity()).isFalse(); } @Test void fromDeployStrategyAnnotation() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/replace", "true")); assertThat(strategy.getDeployStrategy()).isEqualTo(DeployStrategy.REPLACE); } @Test void fromVersionedAnnotation() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/versioned", "true")); assertThat(strategy.getVersioned()).isEqualTo(Versioned.TRUE); } @Test void fromMaxVersionHistoryAnnotation() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/max-version-history", "10")); assertThat(strategy.getMaxVersionHistory()).isEqualTo(OptionalInt.of(10)); } @Test void fromNonIntegerMaxVersionHistoryAnnotation() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/max-version-history", "zz")); assertThat(strategy.getMaxVersionHistory()).isEqualTo(OptionalInt.empty()); } @Test void fromUseSourceCapacityAnnotation() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/use-source-capacity", "true")); assertThat(strategy.isUseSourceCapacity()).isTrue(); } @Test void fromUseSourceCapacityAnnotationFalse() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/use-source-capacity", "false")); 
assertThat(strategy.isUseSourceCapacity()).isFalse(); } @Test void fromUseSourceCapacityAnnotationNonsense() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of("strategy.spinnaker.io/use-source-capacity", "zzz")); assertThat(strategy.isUseSourceCapacity()).isFalse(); } @Test void allAnnotationsPresent() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.fromAnnotations( ImmutableMap.of( "strategy.spinnaker.io/replace", "true", "strategy.spinnaker.io/versioned", "true", "strategy.spinnaker.io/max-version-history", "20", "strategy.spinnaker.io/use-source-capacity", "true", "strategy.spinnaker.io/random-annotation", "abc")); assertThat(strategy.getDeployStrategy()).isEqualTo(DeployStrategy.REPLACE); assertThat(strategy.getVersioned()).isEqualTo(Versioned.TRUE); assertThat(strategy.getMaxVersionHistory()).isEqualTo(OptionalInt.of(20)); assertThat(strategy.isUseSourceCapacity()).isTrue(); } @Test void builderDefaults() { KubernetesManifestStrategy strategy = KubernetesManifestStrategy.builder().build(); assertThat(strategy.getDeployStrategy()).isEqualTo(DeployStrategy.APPLY); assertThat(strategy.getVersioned()).isEqualTo(Versioned.DEFAULT); assertThat(strategy.getMaxVersionHistory()).isEqualTo(OptionalInt.empty()); assertThat(strategy.isUseSourceCapacity()).isFalse(); } @Test void emptyAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder().build().toAnnotations(); assertThat(annotations).isEmpty(); } @Test void deployStrategyRecreateToAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder() .deployStrategy(DeployStrategy.RECREATE) .build() .toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/recreate", "true")); } @Test void deployStrategyReplaceToAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder() .deployStrategy(DeployStrategy.REPLACE) .build() .toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/replace", "true")); } @Test void versionedToAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder().versioned(Versioned.FALSE).build().toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/versioned", "false")); } @Test void maxVersionHistoryToAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder().maxVersionHistory(10).build().toAnnotations(); assertThat(annotations).containsOnly(entry("strategy.spinnaker.io/max-version-history", "10")); } @Test void useSourceCapacityToAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder().useSourceCapacity(true).build().toAnnotations(); assertThat(annotations) .containsOnly(entry("strategy.spinnaker.io/use-source-capacity", "true")); } @ParameterizedTest @EnumSource(DeployStrategy.class) void deploymentStrategySetsAnnotations(DeployStrategy deployStrategy) { Map<String, String> annotations = new HashMap<>(); deployStrategy.setAnnotations(annotations); assertThat(annotations).isEqualTo(deployStrategy.toAnnotations()); } @ParameterizedTest @EnumSource(DeployStrategy.class) void deploymentStrategyOverwritesAnnotations(DeployStrategy deployStrategy) { Map<String, String> annotations = new HashMap<>(DeployStrategy.RECREATE.toAnnotations()); deployStrategy.setAnnotations(annotations); assertThat(annotations).isEqualTo(deployStrategy.toAnnotations()); } @ParameterizedTest @EnumSource(DeployStrategy.class) void 
deploymentStrategyIgnoresIrrelevantAnnotations(DeployStrategy deployStrategy) { ImmutableMap<String, String> irrelevantAnnotations = ImmutableMap.of( "strategy.spinnaker.io/versioned", "false", "artifact.spinnaker.io/version", "v001", "my-custom-annotation", "my-custom-value"); Map<String, String> annotations = new HashMap<>(irrelevantAnnotations); deployStrategy.setAnnotations(annotations); assertThat(annotations).containsAllEntriesOf(irrelevantAnnotations); } @Test void toAnnotationsMultipleAnnotations() { Map<String, String> annotations = KubernetesManifestStrategy.builder() .deployStrategy(DeployStrategy.RECREATE) .versioned(Versioned.TRUE) .maxVersionHistory(30) .useSourceCapacity(true) .build() .toAnnotations(); assertThat(annotations) .containsOnly( entry("strategy.spinnaker.io/recreate", "true"), entry("strategy.spinnaker.io/versioned", "true"), entry("strategy.spinnaker.io/max-version-history", "30"), entry("strategy.spinnaker.io/use-source-capacity", "true")); } }
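// --- Added sketch, not part of the original file ---
// The tests above exercise toAnnotations() and fromAnnotations() separately.
// A round-trip check, assuming the two are inverses for a fully specified
// strategy and reusing the imports of the file above; the class and test
// names here are hypothetical.
final class KubernetesManifestStrategyRoundTripSketch {
  @Test
  void roundTripThroughAnnotations() {
    KubernetesManifestStrategy original =
        KubernetesManifestStrategy.builder()
            .deployStrategy(DeployStrategy.RECREATE)
            .versioned(Versioned.TRUE)
            .maxVersionHistory(5)
            .useSourceCapacity(true)
            .build();
    // serialize to strategy.spinnaker.io/* annotations, then parse them back
    KubernetesManifestStrategy parsed =
        KubernetesManifestStrategy.fromAnnotations(original.toAnnotations());
    assertThat(parsed.getDeployStrategy()).isEqualTo(DeployStrategy.RECREATE);
    assertThat(parsed.getVersioned()).isEqualTo(Versioned.TRUE);
    assertThat(parsed.getMaxVersionHistory()).isEqualTo(OptionalInt.of(5));
    assertThat(parsed.isUseSourceCapacity()).isTrue();
  }
}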
package org.radargun.stages.cache; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.radargun.DistStageAck; import org.radargun.config.Property; import org.radargun.config.Stage; import org.radargun.stages.AbstractDistStage; import org.radargun.stages.cache.generators.ByteArrayValueGenerator; import org.radargun.stages.cache.generators.KeyGenerator; import org.radargun.stages.cache.generators.StringKeyGenerator; import org.radargun.stages.cache.generators.ValueGenerator; import org.radargun.stages.helpers.BucketPolicy; import org.radargun.stages.helpers.Range; import org.radargun.state.SlaveState; import org.radargun.traits.BasicOperations; import org.radargun.traits.CacheInformation; import org.radargun.traits.Debugable; import org.radargun.traits.InMemoryBasicOperations; import org.radargun.traits.InjectTrait; /** * @author Radim Vansa &lt;[email protected]&gt; */ @Stage(doc = "Stage for checking presence or absence of data entered in other stages.") public class CheckCacheDataStage extends AbstractDistStage { @Property(optional = false, doc = "Number of entries with key in form specified by the last used key generator, in the cache.") private int numEntries; @Property(doc = "Index of key of first entry. This number will be multiplied by slaveIndex. Default is 0.") private int firstEntryOffset = 0; @Property(doc = "Number of entries that will be checked in each step. Default is 1.") private int checkEntryCount = 1; @Property(doc = "Number of entries stepped in each step. Default is 1.") private int stepEntryCount = 1; @Property(optional = false, doc = "Number of bytes carried in single entry.") private int entrySize; @Property(doc = "Entries that do not have the expected form but occur in the cluster. This string specifies " + "a polynomial in number of slaves: 1,2,3 with 4 slaves would result in 1 + 2*4 + 3*4*4 = 57 extra entries. " + "Defaults to 0.") private String extraEntries; @Property(doc = "Number of threads per node which check data validity. Default is 1.") private int checkThreads = 1; @Property(doc = "Usually the test checks that sum of local nodes = numOwners * numEntries + extraEntries. " + "This option disables such behaviour. Default is false.") private boolean ignoreSum = false; @Property(doc = "If true, the entries are not retrieved, this stage only checks that the sum of entries from local nodes is correct. Default is false.") private boolean sizeOnly = false; @Property(doc = "Hint how many slaves are currently alive - if set to > 0 then the query for amount of entries in " + "this cache is postponed until the cache appears to be fully replicated. By default this is disabled.") private int liveSlavesHint = -1; @Property(doc = "If set to true, we are checking that the data are NOT in the cluster anymore. Default is false.") private boolean deleted = false; @Property(doc = "Number of queries after which a DEBUG log message is printed. Default is 10000.") private int logChecksCount = 10000; @Property(doc = "If the GET request results in null response, call wrapper-specific functions to show debug info. " + "Default is false.") private boolean debugNull = false; @Property(doc = "If entry is null, fail immediately. 
Default is false.") private boolean failOnNull = false; @Property(doc = "If the cache wrapper supports persistent storage and this is set to true, the check " + "will be executed only against in-memory data. Default is false.") private boolean memoryOnly = false; // TODO: better names, even when these are kind of hacks @Property(doc = "Check whether the sum of subparts sizes is the same as local size. Default is false.") private boolean checkSubpartsSumLocal = false; @Property(doc = "Check whether the same subparts from each cache have the same size. Default is false.") private boolean checkSubpartsEqual = false; @Property(doc = "Check that number of non-zero subparts is equal to number of replicas. Default is false.") private boolean checkSubpartsAreReplicas = false; private transient KeyGenerator keyGenerator; @InjectTrait(dependency = InjectTrait.Dependency.MANDATORY) protected BasicOperations basicOperations; @InjectTrait protected InMemoryBasicOperations inMemoryBasicOperations; @InjectTrait(dependency = InjectTrait.Dependency.MANDATORY) protected CacheInformation cacheInformation; @InjectTrait protected Debugable debugable; protected BasicOperations.Cache basicCache; protected Debugable.Cache debugableCache; @Override public DistStageAck executeOnSlave() { if (!shouldExecute()) { return successfulResponse(); } if (!isServiceRunnning()) { // this slave is dead and does not participate on check return successfulResponse(); } if (!sizeOnly) { keyGenerator = (KeyGenerator) slaveState.get(KeyGenerator.KEY_GENERATOR); if (keyGenerator == null) { keyGenerator = new StringKeyGenerator(); } CheckResult result = new CheckResult(); if (memoryOnly && inMemoryBasicOperations != null) { basicCache = inMemoryBasicOperations.getMemoryOnlyCache(getCacheName()); } else { basicCache = basicOperations.getCache(getCacheName()); } if (debugable != null){ debugableCache = debugable.getCache(getCacheName()); } try { if (checkThreads <= 1) { ValueChecker checker = new GeneratedValueChecker((ValueGenerator) slaveState.get(ValueGenerator.VALUE_GENERATOR)); int entriesToCheck = numEntries; for (int i = firstEntryOffset * slaveState.getSlaveIndex(); entriesToCheck > 0; i += stepEntryCount) { int checkAmount = Math.min(checkEntryCount, entriesToCheck); for (int j = 0; j < checkAmount; ++j) { if (!checkKey(basicCache, debugableCache, i + j, result, checker)) { entriesToCheck = 0; break; } } entriesToCheck -= checkAmount; } } else { ExecutorService executor = Executors.newFixedThreadPool(checkThreads); List<Callable<CheckResult>> tasks = new ArrayList<Callable<CheckResult>>(); for (int i = 0; i < checkThreads; ++i) { Range range = Range.divideRange(numEntries, checkThreads, i); tasks.add(new CheckRangeTask(range.getStart(), range.getEnd())); } for (Future<CheckResult> future : executor.invokeAll(tasks)) { CheckResult value = future.get(); result.merge(value); } } } catch (Exception e) { return errorResponse("Failed to check entries", e); } if (!isDeleted()) { if (result.found != getExpectedNumEntries()) { return new InfoAck(slaveState, result).error("Found " + result.found + " entries while " + getExpectedNumEntries() + " should be loaded."); } } else { if (result.found > 0) { return new InfoAck(slaveState, result).error("Found " + result.found + " entries while these should be deleted."); } } } CacheInformation.Cache info = cacheInformation.getCache(getCacheName()); if (liveSlavesHint > 0) { // try to wait until data are properly replicated int myExpectedSize; int extraEntries = getExtraEntries(); int 
commonEntries = isDeleted() ? 0 : numEntries; int numOwners = info.getNumReplicas(); if (numOwners < 0) { myExpectedSize = -numOwners * slaveState.getClusterSize() * (commonEntries + extraEntries) / liveSlavesHint; } else { myExpectedSize = numOwners * (commonEntries + extraEntries) / liveSlavesHint; } for (int attempt = 0; attempt < 5; ++attempt) { int local = info.getLocalSize(); double ratio = (double) local / (double) myExpectedSize; if (ratio < 0.9 || ratio > 1.1) { log.warn("Local size (" + local + ") differs substantially from expected size (" + myExpectedSize + "), waiting 30s to let it replicate"); try { Thread.sleep(30000); } catch (InterruptedException e) { break; } } else break; } } return new InfoAck(slaveState, info.getLocalSize(), info.getStructuredSize(), info.getNumReplicas()); } private String getCacheName() { return (String) slaveState.get(BucketPolicy.LAST_BUCKET); } private class CheckRangeTask implements Callable<CheckResult> { private int from, to; public CheckRangeTask(int from, int to) { this.from = from; this.to = to; } @Override public CheckResult call() throws Exception { try { CheckResult result = new CheckResult(); ValueChecker checker = new GeneratedValueChecker((ValueGenerator) slaveState.get(ValueGenerator.VALUE_GENERATOR)); String bucketId = getCacheName(); int entriesToCheck = to - from; for (int i = from * (stepEntryCount / checkEntryCount) + firstEntryOffset * slaveState.getSlaveIndex(); entriesToCheck > 0; i += stepEntryCount) { int checkAmount = Math.min(checkEntryCount, entriesToCheck); for (int j = 0; j < checkAmount; ++j) { if (!checkKey(basicCache, debugableCache, i + j, result, checker)) { entriesToCheck = 0; break; } } entriesToCheck -= checkAmount; } return result; } catch (Exception e) { log.error("Failed to check entries", e); return null; } } } protected int getExpectedNumEntries() { return numEntries; } protected boolean checkKey(BasicOperations.Cache basicCache, Debugable.Cache debugableCache, int keyIndex, CheckResult result, ValueChecker checker) { Object key = keyGenerator.generateKey(keyIndex); try { Object value = basicCache.get(key); if (!isDeleted()) { if (value != null && checker.check(keyIndex, value)) { result.found++; } else { if (value == null) { result.nullValues++; if (debugNull && debugableCache != null) { debugableCache.debugInfo(); debugableCache.debugKey(key); } if (failOnNull) { return false; } } else { result.invalidValues++; } unexpected(key, value); } } else { if (value != null) { result.found++; shouldBeDeleted(key, value); } else { result.nullValues++; } } } catch (Exception e) { if (result.exceptions == 0) { log.error("Error retrieving value for key " + key, e); } else if (log.isTraceEnabled()) { log.trace("Error retrieving value for key " + key, e); } result.exceptions++; } finally { result.checked++; if (result.checked % logChecksCount == 0) { log.debug("Checked so far: " + result); } } return true; } protected void shouldBeDeleted(Object key, Object value) { if (log.isTraceEnabled()) { log.trace("Key " + key + " still has value " + value); } } protected void unexpected(Object key, Object value) { if (log.isTraceEnabled()) { log.trace("Key " + key + " has unexpected value " + value); } } @Override public boolean processAckOnMaster(List<DistStageAck> acks) { boolean success = super.processAckOnMaster(acks); if (!success) { return false; } int sumSize = 0; Integer numReplicas = null; Map<Object, Map<Integer, Integer>> subparts = new HashMap<Object, Map<Integer, Integer>>(); for (DistStageAck ack : acks) { if (!(ack 
instanceof InfoAck)) { continue; } InfoAck info = (InfoAck) ack; log.debug("Slave " + ack.getSlaveIndex() + " has local size " + info.localSize); sumSize += info.localSize; if (numReplicas == null) numReplicas = info.numReplicas; else if (numReplicas != info.numReplicas) { log.error("Slave " + ack.getSlaveIndex() + " reports " + info.numReplicas + " replicas but other slave reported " + numReplicas); success = false; } int sumSubpartSize = 0; for (Map.Entry<?, Integer> subpart : info.structuredSize.entrySet()) { log.trace("Subpart " + subpart.getKey() + " = " + subpart.getValue()); if (subpart.getValue() == 0) continue; sumSubpartSize += subpart.getValue(); Map<Integer, Integer> otherSubparts = subparts.get(subpart.getKey()); if (otherSubparts == null) { subparts.put(subpart.getKey(), new HashMap<Integer, Integer>(Collections.singletonMap(info.getSlaveIndex(), subpart.getValue()))); } else if (checkSubpartsEqual) { for (Map.Entry<Integer, Integer> os : otherSubparts.entrySet()) { if ((int) subpart.getValue() != (int) os.getValue()) { log.error(String.format("Slave %d reports %s = %d but slave %d reported size %d", info.getSlaveIndex(), subpart.getKey(), subpart.getValue(), os.getKey(), os.getValue())); success = false; } } otherSubparts.put(info.getSlaveIndex(), subpart.getValue()); } } if (checkSubpartsSumLocal && sumSubpartSize != info.localSize) { log.error(String.format("On slave %d sum of subparts sizes (%d) is not the same as local size (%d)", info.getSlaveIndex(), sumSubpartSize, info.localSize)); success = false; } } if (checkSubpartsAreReplicas) { for (Map.Entry<Object, Map<Integer, Integer>> subpart : subparts.entrySet()) { if (subpart.getValue().size() != numReplicas) { log.error(String.format("Subpart %s was found in %s, should have %d replicas.", subpart.getKey(), subpart.getValue().keySet(), numReplicas)); success = false; } } } if (ignoreSum) { log.info("The sum size is " + sumSize); } else { int expectedSize; int extraEntries = getExtraEntries(); int commonEntries = isDeleted() ? 
0 : numEntries; if (numReplicas < 0) { expectedSize = -numReplicas * masterState.getClusterSize() * (commonEntries + extraEntries); } else { expectedSize = numReplicas * (commonEntries + extraEntries); } if (expectedSize != sumSize) { log.error("The cache should contain " + expectedSize + " entries (including backups) but contains " + sumSize + " entries."); success = false; } else { log.trace("The sum size is " + sumSize + " entries as expected"); } } return success; } public int getNumEntries() { return this.numEntries; } private int getExtraEntries() { if (extraEntries == null) return 0; int sum = 0; int multiplicator = 1; try { for (String entries : extraEntries.split(",")) { int count = Integer.parseInt(entries); sum += count * multiplicator; multiplicator *= slaveState.getClusterSize(); } } catch (NumberFormatException e) { log.error("Cannot parse " + extraEntries); } return sum; } public boolean isDeleted() { return deleted; } protected static class InfoAck extends DistStageAck { final long localSize; final Map<?, Integer> structuredSize; final int numReplicas; final CheckResult checkResult; public InfoAck(SlaveState slaveState, long localSize, Map<?, Integer> structuredSize, int numReplicas) { super(slaveState); this.localSize = localSize; this.structuredSize = structuredSize; this.numReplicas = numReplicas; checkResult = null; } public InfoAck(SlaveState slaveState, CheckResult checkResult) { super(slaveState); this.checkResult = checkResult; localSize = -1; structuredSize = null; numReplicas = -1; } @Override public String toString() { return "InfoAck{" + "localSize=" + localSize + ", numReplicas=" + numReplicas + ", checkResult=" + checkResult + "} " + super.toString(); } } protected static class CheckResult implements Serializable { public long checked; public long found; public long nullValues; public long invalidValues; public long exceptions; public void merge(CheckResult value) { if (value == null) return; checked += value.checked; found += value.found; nullValues += value.nullValues; invalidValues += value.invalidValues; exceptions += value.exceptions; } @Override public String toString() { return String.format("[checked=%d, found=%d, nullValues=%d, invalidValues=%d, exceptions=%d]", checked, found, nullValues, invalidValues, exceptions); } } protected interface ValueChecker { boolean check(int keyIndex, Object value); } protected class GeneratedValueChecker implements ValueChecker { private final ValueGenerator valueGenerator; public GeneratedValueChecker(ValueGenerator valueGenerator) { this.valueGenerator = valueGenerator == null ? new ByteArrayValueGenerator() : valueGenerator; } @Override public boolean check(int keyIndex, Object value) { return valueGenerator.checkValue(value, entrySize); } } }
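// --- Added sketch, not part of the original file ---
// Worked example of the extraEntries polynomial described in the @Property doc
// and implemented in getExtraEntries() above: the i-th comma-separated
// coefficient is multiplied by clusterSize^i. Standalone version with
// hypothetical names.
final class ExtraEntriesSketch {
    static int evaluate(String spec, int clusterSize) {
        int sum = 0;
        int multiplier = 1; // clusterSize^i for the i-th coefficient
        for (String coefficient : spec.split(",")) {
            sum += Integer.parseInt(coefficient.trim()) * multiplier;
            multiplier *= clusterSize;
        }
        return sum;
    }

    public static void main(String[] args) {
        // "1,2,3" with 4 slaves: 1 + 2*4 + 3*4*4 = 57, matching the doc above
        System.out.println(evaluate("1,2,3", 4));
    }
}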
/* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config; import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig; import com.thoughtworks.go.config.remote.*; import com.thoughtworks.go.domain.CaseInsensitiveStringTest; import com.thoughtworks.go.domain.PipelineGroups; import com.thoughtworks.go.domain.PiplineConfigVisitor; import com.thoughtworks.go.helper.*; import org.hamcrest.core.Is; import org.junit.Before; import org.junit.Test; import java.util.*; import static com.thoughtworks.go.helper.PipelineConfigMother.createGroup; import static com.thoughtworks.go.helper.PipelineConfigMother.createPipelineConfig; import static java.util.Arrays.asList; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.not; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; public class BasicCruiseConfigTest extends CruiseConfigTestBase { @Before public void setup() throws Exception { pipelines = new BasicPipelineConfigs("existing_group", new Authorization()); cruiseConfig = new BasicCruiseConfig(pipelines); goConfigMother = new GoConfigMother(); } @Override protected BasicCruiseConfig createCruiseConfig(BasicPipelineConfigs pipelineConfigs) { return new BasicCruiseConfig(pipelineConfigs); } @Override protected BasicCruiseConfig createCruiseConfig() { return new BasicCruiseConfig(); } @Test public void getAllLocalPipelineConfigs_shouldReturnOnlyLocalPipelinesWhenNoRemotes() { PipelineConfig pipeline1 = createPipelineConfig("local-pipe-1", "stage1"); cruiseConfig.getGroups().addPipeline("existing_group", pipeline1); List<PipelineConfig> localPipelines = cruiseConfig.getAllLocalPipelineConfigs(false); assertThat(localPipelines.size(), is(1)); assertThat(localPipelines, hasItem(pipeline1)); } @Test public void shouldGenerateAMapOfAllPipelinesAndTheirParentDependencies() { /* * -----+ p2 --> p4 * p1 * -----+ p3 * * */ PipelineConfig p1 = createPipelineConfig("p1", "s1", "j1"); PipelineConfig p2 = createPipelineConfig("p2", "s2", "j1"); p2.addMaterialConfig(new DependencyMaterialConfig(new CaseInsensitiveString("p1"), new CaseInsensitiveString("s1"))); PipelineConfig p3 = createPipelineConfig("p3", "s3", "j1"); p3.addMaterialConfig(new DependencyMaterialConfig(new CaseInsensitiveString("p1"), new CaseInsensitiveString("s1"))); PipelineConfig p4 = createPipelineConfig("p4", "s4", "j1"); p4.addMaterialConfig(new DependencyMaterialConfig(new CaseInsensitiveString("p2"), new CaseInsensitiveString("s2"))); pipelines.addAll(asList(p4, p2, p1, p3)); Map<String, List<PipelineConfig>> expectedPipelines = cruiseConfig.generatePipelineVsDownstreamMap(); assertThat(expectedPipelines.size(), is(4)); assertThat(expectedPipelines.get("p1"), hasItems(p2, p3)); assertThat(expectedPipelines.get("p2"), hasItems(p4)); assertThat(expectedPipelines.get("p3").isEmpty(), is(true)); assertThat(expectedPipelines.get("p4").isEmpty(), is(true)); } @Test public 
void shouldSetOriginInPipelines() { pipelines = new BasicPipelineConfigs("group_main", new Authorization(), PipelineConfigMother.pipelineConfig("pipe1")); BasicCruiseConfig mainCruiseConfig = new BasicCruiseConfig(pipelines); PipelineConfig pipe = pipelines.get(0); mainCruiseConfig.setOrigins(new FileConfigOrigin()); assertThat(pipe.getOrigin(), Is.<ConfigOrigin>is(new FileConfigOrigin())); } @Test public void shouldSetOriginInEnvironments() { BasicCruiseConfig mainCruiseConfig = new BasicCruiseConfig(pipelines); BasicEnvironmentConfig env = new BasicEnvironmentConfig(new CaseInsensitiveString("e")); mainCruiseConfig.addEnvironment(env); mainCruiseConfig.setOrigins(new FileConfigOrigin()); assertThat(env.getOrigin(), Is.<ConfigOrigin>is(new FileConfigOrigin())); } @Test public void shouldGetPipelinesWithGroupName() throws Exception { PipelineConfigs group1 = createGroup("group1", createPipelineConfig("pipeline1", "stage1")); PipelineConfigs group2 = createGroup("group2", createPipelineConfig("pipeline2", "stage2")); CruiseConfig config = createCruiseConfig(); config.setGroup(new PipelineGroups(group1, group2)); assertThat(config.pipelines("group1"), is(group1)); assertThat(config.pipelines("group2"), is(group2)); } @Test public void shouldReturnTrueForPipelineThatInFirstGroup() { PipelineConfigs group1 = createGroup("group1", createPipelineConfig("pipeline1", "stage1")); CruiseConfig config = new BasicCruiseConfig(group1); assertThat("shouldReturnTrueForPipelineThatInFirstGroup", config.isInFirstGroup(new CaseInsensitiveString("pipeline1")), is(true)); } @Test public void shouldReturnFalseForPipelineThatNotInFirstGroup() { PipelineConfigs group1 = createGroup("group1", createPipelineConfig("pipeline1", "stage1")); PipelineConfigs group2 = createGroup("group2", createPipelineConfig("pipeline2", "stage2")); CruiseConfig config = new BasicCruiseConfig(group1, group2); assertThat("shouldReturnFalseForPipelineThatNotInFirstGroup", config.isInFirstGroup(new CaseInsensitiveString("pipeline2")), is(false)); } @Test public void shouldIncludeRemotePipelinesAsPartOfCachedPipelineConfigs() { BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("p1", "p2"); ConfigRepoConfig repoConfig1 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url1"), "plugin"); ConfigRepoConfig repoConfig2 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url2"), "plugin"); cruiseConfig.setConfigRepos(new ConfigReposConfig(repoConfig1, repoConfig2)); PartialConfig partialConfigInRepo1 = PartialConfigMother.withPipeline("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r1")); PartialConfig partialConfigInRepo2 = PartialConfigMother.withPipeline("pipeline_in_repo2", new RepoConfigOrigin(repoConfig2, "repo2_r1")); cruiseConfig.merge(asList(partialConfigInRepo1, partialConfigInRepo2), false); assertThat(cruiseConfig.getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo1")), is(true)); assertThat(cruiseConfig.getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo2")), is(true)); } @Test public void shouldRejectRemotePipelinesNotOriginatingFromRegisteredConfigReposFromCachedPipelineConfigs() { BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("p1", "p2"); ConfigRepoConfig repoConfig1 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url1"), "plugin"); ConfigRepoConfig repoConfig2 = new ConfigRepoConfig(MaterialConfigsMother.gitMaterialConfig("url2"), "plugin"); cruiseConfig.setConfigRepos(new 
ConfigReposConfig(repoConfig2)); PartialConfig partialConfigInRepo1 = PartialConfigMother.withPipeline("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r1")); PartialConfig partialConfigInRepo2 = PartialConfigMother.withPipeline("pipeline_in_repo2", new RepoConfigOrigin(repoConfig2, "repo2_r1")); cruiseConfig.merge(asList(partialConfigInRepo1, partialConfigInRepo2), false); assertThat(cruiseConfig.getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo1")), is(false)); assertThat(cruiseConfig.getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo2")), is(true)); } @Test public void shouldReturnAListOfPipelineNamesAssociatedWithOneTemplate() { ArrayList<CaseInsensitiveString> pipelinesAssociatedWithATemplate = new ArrayList<>(); pipelinesAssociatedWithATemplate.add(new CaseInsensitiveString("p1")); BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig(); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p1", "t1", "s1", "j1"); assertThat(cruiseConfig.pipelinesAssociatedWithTemplate(new CaseInsensitiveString("t1")), is(pipelinesAssociatedWithATemplate)); } @Test public void shouldReturnNullForAssociatedPipelineNamesWhenTemplateNameIsBlank() { ArrayList<CaseInsensitiveString> pipelinesAssociatedWithATemplate = new ArrayList<>(); pipelinesAssociatedWithATemplate.add(new CaseInsensitiveString("p1")); BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig(); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p1", "t1", "s1", "j1"); assertThat(cruiseConfig.pipelinesAssociatedWithTemplate(new CaseInsensitiveString("")), is(new ArrayList<CaseInsensitiveString>())); } @Test public void shouldReturnAnEmptyListForPipelinesIfTemplateNameIsNull() { BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig(); assertThat(cruiseConfig.pipelinesAssociatedWithTemplate(null).isEmpty(), is(true)); } @Test public void shouldReturnAnEmptyListIfThereAreNoPipelinesAssociatedWithGivenTemplate() { BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig(); assertThat(cruiseConfig.pipelinesAssociatedWithTemplate(new CaseInsensitiveString("non-existent-template")).isEmpty(), is(true)); } @Test public void shouldReturnAMapOfAllTemplateNamesToPipelinesForAnAdminUser() { BasicCruiseConfig cruiseConfig = getCruiseConfigWithSecurityEnabled(); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p1", "t1", "s1", "j1"); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p2", "t2", "s2", "j2"); HashMap<CaseInsensitiveString, List<CaseInsensitiveString>> map = new HashMap<>(); List<CaseInsensitiveString> template1Pipelines = Arrays.asList(new CaseInsensitiveString("p1")); List<CaseInsensitiveString> template2Pipelines = Arrays.asList(new CaseInsensitiveString("p2")); map.put(new CaseInsensitiveString("t1"), template1Pipelines); map.put(new CaseInsensitiveString("t2"), template2Pipelines); assertThat(cruiseConfig.templatesWithPipelinesForUser("admin", null), is(map)); } @Test public void shouldReturnASubsetOfTemplatesToPipelinesMapForTemplateAdmin() { BasicCruiseConfig cruiseConfig = getCruiseConfigWithSecurityEnabled(); CaseInsensitiveString templateAdmin = new CaseInsensitiveString("template-admin"); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p1", "t1", "s1", "j1"); PipelineTemplateConfig template2 = PipelineTemplateConfigMother.createTemplate("t2", new Authorization(new AdminsConfig(new AdminUser(templateAdmin))), StageConfigMother.manualStage("foo")); 
cruiseConfig.addTemplate(template2); HashMap<CaseInsensitiveString, List<CaseInsensitiveString>> map = new HashMap<>(); map.put(new CaseInsensitiveString("t2"), new ArrayList<>()); assertThat(cruiseConfig.templatesWithPipelinesForUser(templateAdmin.toString(), null), is(map)); } @Test public void shouldReturnASubsetOfTemplatesToPipelinesMapForTemplateViewUser() { BasicCruiseConfig cruiseConfig = getCruiseConfigWithSecurityEnabled(); CaseInsensitiveString templateViewUser = new CaseInsensitiveString("template-view"); PipelineTemplateConfig template2 = PipelineTemplateConfigMother.createTemplate("t2", new Authorization(new ViewConfig(new AdminUser(templateViewUser))), StageConfigMother.manualStage("foo")); cruiseConfig.addTemplate(template2); HashMap<CaseInsensitiveString, List<CaseInsensitiveString>> map = new HashMap<>(); map.put(new CaseInsensitiveString("t2"), new ArrayList<>()); assertThat(cruiseConfig.templatesWithPipelinesForUser(templateViewUser.toString(), null), is(map)); } @Test public void shouldReturnASubsetOfTemplatesToPipelinesMapForGroupAdmin() { BasicCruiseConfig cruiseConfig = getCruiseConfigWithSecurityEnabled(); CaseInsensitiveString groupAdmin = new CaseInsensitiveString("template-view"); new GoConfigMother().addPipelineWithGroup(cruiseConfig, "group", "p1", "s1", "j1"); PipelineConfigs pipelineConfigs = cruiseConfig.getGroups().get(0); pipelineConfigs.setAuthorization(new Authorization(new AdminsConfig(new AdminUser(groupAdmin)))); PipelineTemplateConfig template2 = PipelineTemplateConfigMother.createTemplate("t2", StageConfigMother.manualStage("foo")); cruiseConfig.addTemplate(template2); HashMap<CaseInsensitiveString, List<CaseInsensitiveString>> map = new HashMap<>(); map.put(new CaseInsensitiveString("t2"), new ArrayList<>()); assertThat(cruiseConfig.templatesWithPipelinesForUser(groupAdmin.toString(), null), is(map)); } @Test public void shouldReturnAnEmptyMapForARegularUser() { BasicCruiseConfig cruiseConfig = getCruiseConfigWithSecurityEnabled(); CaseInsensitiveString regularUser = new CaseInsensitiveString("view"); new GoConfigMother().addPipelineWithTemplate(cruiseConfig, "p1", "t1", "s1", "j1"); assertThat(cruiseConfig.templatesWithPipelinesForUser(regularUser.toString(), null), is(new HashMap<>())); } private BasicCruiseConfig getCruiseConfigWithSecurityEnabled() { BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig(); ServerConfig serverConfig = new ServerConfig(new SecurityConfig(null, null, false, new AdminsConfig(new AdminUser(new CaseInsensitiveString("admin")))), null); cruiseConfig.setServerConfig(serverConfig); GoConfigMother.enableSecurityWithPasswordFile(cruiseConfig); return cruiseConfig; } }
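// --- Added sketch, not part of the original file ---
// shouldGenerateAMapOfAllPipelinesAndTheirParentDependencies asserts a map from
// each pipeline name to its direct downstream pipelines. A minimal sketch of
// how such a map can be derived from dependency materials; it assumes
// materialConfigs() and getPipelineName() behave as used elsewhere in GoCD,
// assumes every upstream is present in the input list, and the class and
// method names here are hypothetical (the real logic lives in
// BasicCruiseConfig.generatePipelineVsDownstreamMap()).
final class PipelineDownstreamMapSketch {
    static Map<String, List<PipelineConfig>> downstreamMap(List<PipelineConfig> all) {
        Map<String, List<PipelineConfig>> map = new HashMap<>();
        for (PipelineConfig p : all) {
            map.put(p.name().toString(), new ArrayList<>()); // every pipeline appears, even with no downstreams
        }
        for (PipelineConfig p : all) {
            for (Object material : p.materialConfigs()) {
                if (material instanceof DependencyMaterialConfig) {
                    // p depends on this material's pipeline, so p is one of its downstreams
                    String upstream = ((DependencyMaterialConfig) material).getPipelineName().toString();
                    map.get(upstream).add(p);
                }
            }
        }
        return map;
    }
}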
/* * Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * ScriptURLNodeStepExecutor.java * * User: Greg Schueler <a href="mailto:[email protected]">[email protected]</a> * Created: 5/2/12 2:37 PM * */ package com.dtolabs.rundeck.core.execution.workflow.steps.node.impl; import com.dtolabs.rundeck.core.Constants; import com.dtolabs.rundeck.core.common.Framework; import com.dtolabs.rundeck.core.common.INodeEntry; import com.dtolabs.rundeck.core.common.UpdateUtils; import com.dtolabs.rundeck.core.common.impl.URLFileUpdater; import com.dtolabs.rundeck.core.common.impl.URLFileUpdaterBuilder; import com.dtolabs.rundeck.core.dispatcher.DataContextUtils; import com.dtolabs.rundeck.core.execution.workflow.StepExecutionContext; import com.dtolabs.rundeck.core.execution.workflow.steps.FailureReason; import com.dtolabs.rundeck.core.execution.workflow.steps.StepFailureReason; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepException; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionItem; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutor; import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResult; import com.dtolabs.rundeck.core.utils.Converter; import org.apache.commons.codec.binary.Hex; import org.apache.commons.httpclient.URIException; import org.apache.commons.httpclient.util.URIUtil; import org.apache.log4j.Logger; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.Charset; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Map; /** * ScriptURLNodeStepExecutor is a NodeStepExecutor for executing a script retrieved from a URL. 
* * @author Greg Schueler <a href="mailto:[email protected]">[email protected]</a> */ public class ScriptURLNodeStepExecutor implements NodeStepExecutor { public static final Logger logger = Logger.getLogger(ScriptURLNodeStepExecutor.class.getName()); public static final String SERVICE_IMPLEMENTATION_NAME = "script-url"; public static final int DEFAULT_TIMEOUT = 30; public static final boolean USE_CACHE = true; private File cacheDir; private Framework framework; URLFileUpdater.httpClientInteraction interaction; private ScriptFileNodeStepUtils scriptUtils = new DefaultScriptFileNodeStepUtils(); public ScriptURLNodeStepExecutor(Framework framework) { this.framework = framework; cacheDir = new File(Constants.getBaseVar(framework.getFilesystemFramework().getBaseDir().getAbsolutePath()) + "/cache/ScriptURLNodeStepExecutor"); } private static String hashURL(final String url) { try { MessageDigest digest = MessageDigest.getInstance("SHA-1"); digest.reset(); digest.update(url.getBytes(Charset.forName("UTF-8"))); return new String(Hex.encodeHex(digest.digest())); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } return Integer.toString(url.hashCode()); } public ScriptFileNodeStepUtils getScriptUtils() { return scriptUtils; } public void setScriptUtils(ScriptFileNodeStepUtils scriptUtils) { this.scriptUtils = scriptUtils; } static enum Reason implements FailureReason{ /** * Failed to download required URL */ URLDownloadFailure } public NodeStepResult executeNodeStep( final StepExecutionContext context, final NodeStepExecutionItem item, final INodeEntry node ) throws NodeStepException { final ScriptURLCommandExecutionItem script = (ScriptURLCommandExecutionItem) item; File destinationTempFile = downloadURLToTempFile(context, node, script); if (!USE_CACHE) { destinationTempFile.deleteOnExit(); } return scriptUtils.executeScriptFile( context, node, null, destinationTempFile.getAbsolutePath(), null, script.getFileExtension(), script.getArgs(), script.getScriptInterpreter(), script.getInterpreterArgsQuoted(), framework.getExecutionService() ); } private File downloadURLToTempFile( final StepExecutionContext context, final INodeEntry node, final ScriptURLCommandExecutionItem script ) throws NodeStepException { if (!cacheDir.isDirectory() && !cacheDir.mkdirs()) { throw new RuntimeException("Unable to create cachedir: " + cacheDir.getAbsolutePath()); } //create node context for node and substitute data references in command final Map<String, Map<String, String>> nodeDataContext = DataContextUtils.addContext("node", DataContextUtils.nodeData(node), context.getDataContext()); final String finalUrl = expandUrlString(script.getURLString(), nodeDataContext); final URL url; try { url = new URL(finalUrl); } catch (MalformedURLException e) { throw new NodeStepException(e, StepFailureReason.ConfigurationFailure, node.getNodename()); } if(null!=context.getExecutionListener()){ context.getExecutionListener().log(4, "Requesting URL: " + url.toExternalForm()); } String cleanUrl = url.toExternalForm().replaceAll("^(https?://)([^:@/]+):[^@/]*@", "$1$2:****@"); String tempFileName = hashURL(url.toExternalForm()) + ".temp"; File destinationTempFile = new File(cacheDir, tempFileName); File destinationCacheData = new File(cacheDir, tempFileName + ".cache.properties"); //update from URL if necessary final URLFileUpdaterBuilder urlFileUpdaterBuilder = new URLFileUpdaterBuilder() .setUrl(url) .setAcceptHeader("*/*") .setTimeout(DEFAULT_TIMEOUT); if (USE_CACHE) { urlFileUpdaterBuilder 
.setCacheMetadataFile(destinationCacheData)
                .setCachedContent(destinationTempFile)
                .setUseCaching(true);
        }
        final URLFileUpdater updater = urlFileUpdaterBuilder.createURLFileUpdater();
        try {
            if (null != interaction) {
                //allow mock
                updater.setInteraction(interaction);
            }
            UpdateUtils.update(updater, destinationTempFile);
            logger.debug("Updated cached script file: " + destinationTempFile);
        } catch (UpdateUtils.UpdateException e) {
            if (!destinationTempFile.isFile() || destinationTempFile.length() < 1) {
                throw new NodeStepException(
                    "Error requesting URL script: " + cleanUrl + ": " + e.getMessage(),
                    e,
                    Reason.URLDownloadFailure,
                    node.getNodename());
            } else {
                logger.error("Error requesting URL script: " + cleanUrl + ": " + e.getMessage(), e);
            }
        }
        return destinationTempFile;
    }

    public static final Converter<String, String> urlPathEncoder = new Converter<String, String>() {
        public String convert(String s) {
            try {
                return URIUtil.encodeWithinPath(s, "UTF-8");
            } catch (URIException e) {
                // fall back to the raw value if encoding fails
                e.printStackTrace();
                return s;
            }
        }
    };

    public static final Converter<String, String> urlQueryEncoder = new Converter<String, String>() {
        public String convert(String s) {
            try {
                return URIUtil.encodeWithinQuery(s, "UTF-8");
            } catch (URIException e) {
                // fall back to the raw value if encoding fails
                e.printStackTrace();
                return s;
            }
        }
    };

    /**
     * Expand data references in a URL string, using proper encoding for path and query parts.
     *
     * @param urlString   url
     * @param dataContext data
     * @return expanded string
     */
    public static String expandUrlString(final String urlString,
                                         final Map<String, Map<String, String>> dataContext) {
        final String origUrl = urlString;
        final int qindex = origUrl.indexOf("?");
        final StringBuilder builder = new StringBuilder();
        if (qindex > 0) {
            builder.append(DataContextUtils.replaceDataReferences(origUrl.substring(0, qindex), dataContext,
                                                                  urlPathEncoder, true));
            builder.append("?");
            if (qindex < origUrl.length() - 1) {
                builder.append(DataContextUtils.replaceDataReferences(origUrl.substring(qindex + 1), dataContext,
                                                                      urlQueryEncoder, true));
            }
            return builder.toString();
        } else {
            return DataContextUtils.replaceDataReferences(urlString, dataContext, urlPathEncoder, false);
        }
    }

    URLFileUpdater.httpClientInteraction getInteraction() {
        return interaction;
    }

    void setInteraction(URLFileUpdater.httpClientInteraction interaction) {
        this.interaction = interaction;
    }
}
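/*
 * Illustrative usage sketch (not part of the original source): shows how the public
 * expandUrlString(...) utility above applies path encoding before the '?' and query
 * encoding after it. The class name ExpandUrlStringExample and the sample context
 * values are hypothetical, and the expected output shown in the comment assumes
 * standard percent-encoding of spaces.
 */
class ExpandUrlStringExample {
    public static void main(String[] args) {
        // build a minimal data context: {node: {name: "web server 1"}}
        java.util.Map<String, String> nodeData = new java.util.HashMap<String, String>();
        nodeData.put("name", "web server 1");
        java.util.Map<String, java.util.Map<String, String>> context =
            new java.util.HashMap<String, java.util.Map<String, String>>();
        context.put("node", nodeData);

        // "${node.name}" before the '?' goes through urlPathEncoder,
        // the same reference after the '?' goes through urlQueryEncoder
        String expanded = ScriptURLNodeStepExecutor.expandUrlString(
            "http://example.com/scripts/${node.name}?host=${node.name}", context);
        System.out.println(expanded); // e.g. http://example.com/scripts/web%20server%201?host=web%20server%201
    }
}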
package org.jgroups.protocols.pbcast; import org.jgroups.Address; import org.jgroups.Event; import org.jgroups.Message; import org.jgroups.View; import org.jgroups.util.*; import java.util.*; /** * Coordinator role of the Group MemberShip (GMS) protocol. Accepts JOIN and LEAVE requests and emits view changes * accordingly. * @author Bela Ban */ public class CoordGmsImpl extends ServerGmsImpl { protected static final Long MAX_SUSPEND_TIMEOUT=30000L; public CoordGmsImpl(GMS g) { super(g); } public MergeId getMergeId() { return merger.getMergeId(); } public void init() throws Exception { super.init(); merger.cancelMerge(null); } public void join(Address mbr,boolean useFlushIfPresent) { wrongMethod("join"); } public void joinWithStateTransfer(Address mbr,boolean useFlushIfPresent) { wrongMethod("join"); } /** The coordinator itself wants to leave the group */ public void leave(Address mbr) { if(mbr == null) { if(log.isErrorEnabled()) log.error("member's address is null !"); return; } if(mbr.equals(gms.local_addr)) leaving=true; gms.getViewHandler().add(new Request(Request.LEAVE, mbr, false)); gms.getViewHandler().stop(true); // wait until all requests have been processed, then close the queue and leave // If we're the coord leaving, ignore gms.leave_timeout: https://issues.jboss.org/browse/JGRP-1509 long timeout=(long)(Math.max(gms.leave_timeout, gms.view_ack_collection_timeout) * 1.10); gms.getViewHandler().waitUntilCompleted(timeout); } public void suspect(Address mbr) { if(mbr.equals(gms.local_addr)) { if(log.isWarnEnabled()) log.warn("I am the coord and I'm suspected -- will probably leave shortly"); return; } Collection<Request> suspected=new LinkedHashSet<Request>(1); suspected.add(new Request(Request.SUSPECT,mbr,true)); handleMembershipChange(suspected); } /** * Invoked upon receiving a MERGE event from the MERGE layer. Starts the merge protocol. * See description of protocol in DESIGN. * @param views A List of <em>different</em> views detected by the merge protocol */ public void merge(Map<Address, View> views) { merger.merge(views); } public void handleMergeResponse(MergeData data, MergeId merge_id) { merger.handleMergeResponse(data, merge_id); } public void handleMergeCancelled(MergeId merge_id) { merger.handleMergeCancelled(merge_id); } /** * Fetches the digests from all members and installs them again. Used only for diagnosis and support; don't * use this otherwise ! 
*/ void fixDigests() { merger.fixDigests(); } public void handleMembershipChange(Collection<Request> requests) { boolean joinAndStateTransferInitiated=false; boolean useFlushIfPresent=gms.use_flush_if_present; Collection<Address> new_mbrs=new LinkedHashSet<Address>(requests.size()); Collection<Address> suspected_mbrs=new LinkedHashSet<Address>(requests.size()); Collection<Address> leaving_mbrs=new LinkedHashSet<Address>(requests.size()); boolean self_leaving=false; // is the coord leaving for(Request req: requests) { switch(req.type) { case Request.JOIN: new_mbrs.add(req.mbr); if(req.useFlushIfPresent) useFlushIfPresent=true; break; case Request.JOIN_WITH_STATE_TRANSFER: new_mbrs.add(req.mbr); joinAndStateTransferInitiated=true; if(req.useFlushIfPresent) useFlushIfPresent=true; break; case Request.LEAVE: if(req.suspected) suspected_mbrs.add(req.mbr); else { leaving_mbrs.add(req.mbr); if(gms.local_addr != null && gms.local_addr.equals(req.mbr)) self_leaving=true; } break; case Request.SUSPECT: suspected_mbrs.add(req.mbr); break; } } new_mbrs.remove(gms.local_addr); // remove myself - cannot join myself (already joined) if(gms.getViewId() == null) { // we're probably not the coord anymore (we just left ourselves), let someone else do it // (client will retry when it doesn't get a response) log.debug("gms.view_id is null, I'm not the coordinator anymore (leaving=%b); " + "the new coordinator will handle the leave request", self_leaving); return; } List<Address> current_members=gms.members.getMembers(); leaving_mbrs.retainAll(current_members); // remove all elements of leaving_mbrs which are not current members if(suspected_mbrs.remove(gms.local_addr)) log.warn("I am the coord and I'm being suspected -- will probably leave shortly"); suspected_mbrs.retainAll(current_members); // remove all elements of suspected_mbrs which are not current members // for the members that have already joined, return the current digest and membership for(Iterator<Address> it=new_mbrs.iterator(); it.hasNext();) { Address mbr=it.next(); if(gms.members.contains(mbr)) { // already joined: return current digest and membership log.trace("%s: %s already present; returning existing view %s", gms.local_addr, mbr, gms.view); Tuple<View,Digest> tuple=gms.getViewAndDigest(); if(tuple != null) gms.sendJoinResponse(new JoinRsp(tuple.getVal1(), tuple.getVal2()), mbr); else log.warn("%s: did not find a digest matching view %s; dropping JOIN-RSP", gms.local_addr, gms.view); it.remove(); // remove it anyway, even if we didn't find a digest matching the view (joiner will retry) } } if(new_mbrs.isEmpty() && leaving_mbrs.isEmpty() && suspected_mbrs.isEmpty()) { log.trace("%s: found no members to add or remove, will not create new view", gms.local_addr); return; } View new_view=gms.getNextView(new_mbrs, leaving_mbrs, suspected_mbrs); if(new_view.size() == 0 && gms.local_addr != null && gms.local_addr.equals(new_view.getCreator())) { if(self_leaving) gms.initState(); // in case connect() is called again return; } log.trace("%s: joiners=%s, suspected=%s, leaving=%s, new view: %s", gms.local_addr, new_mbrs, suspected_mbrs, leaving_mbrs, new_view); JoinRsp join_rsp=null; boolean hasJoiningMembers=!new_mbrs.isEmpty(); try { boolean successfulFlush =!useFlushIfPresent || !gms.flushProtocolInStack || gms.startFlush(new_view); if(!successfulFlush && hasJoiningMembers) { // Don't send a join response if the flush fails (http://jira.jboss.org/jira/browse/JGRP-759) // The joiner should block until the previous FLUSH completed 
sendLeaveResponses(leaving_mbrs); // we still have to send potential leave responses // but let the joining client timeout and send another join request return; } // we cannot garbage collect during joining a new member *if* we're the only member // Example: {A}, B joins, after returning JoinRsp to B, A garbage collects messages higher than those // in the digest returned to the client, so the client will *not* be able to ask for retransmission // of those messages if he misses them if(hasJoiningMembers) { gms.getDownProtocol().down(new Event(Event.SUSPEND_STABLE, MAX_SUSPEND_TIMEOUT)); // create a new digest, which contains the new members, minus left members MutableDigest join_digest=new MutableDigest(new_view.getMembersRaw()).set(gms.getDigest()); for(Address member: new_mbrs) join_digest.set(member,0,0); // ... and set the new members. their first seqno will be 1 // If the digest from NAKACK doesn't include all members of the view, we try once more; if it is still // incomplete, we don't send a JoinRsp back to the joiner(s). This shouldn't be a problem as they will retry if(join_digest.allSet() || join_digest.set(gms.getDigest()).allSet()) join_rsp=new JoinRsp(new_view, join_digest); else log.warn("%s: digest does not match view (missing seqnos for %s); dropping JOIN-RSP", gms.local_addr, Arrays.toString(join_digest.getNonSetMembers())); } sendLeaveResponses(leaving_mbrs); // no-op if no leaving members // we don't need to send the digest to existing members: https://issues.jboss.org/browse/JGRP-1317 gms.castViewChange(new_view, null, new_mbrs); gms.sendJoinResponses(join_rsp, new_mbrs); } finally { if(hasJoiningMembers) gms.getDownProtocol().down(new Event(Event.RESUME_STABLE)); if(!joinAndStateTransferInitiated && useFlushIfPresent) gms.stopFlush(); if(self_leaving) gms.initState(); // in case connect() is called again } } /** * Called by the GMS when a VIEW is received. * @param new_view The view to be installed * @param digest If view is a MergeView, digest contains the seqno digest of all members and has to * be set by GMS */ public void handleViewChange(View new_view, Digest digest) { if(leaving && !new_view.containsMember(gms.local_addr)) return; gms.installView(new_view, digest); } public void stop() { super.stop(); // sets leaving=false merger.stop(); } private void sendLeaveResponses(Collection<Address> leaving_members) { for(Address address: leaving_members){ Message msg=new Message(address).setFlag(Message.Flag.OOB, Message.Flag.INTERNAL, Message.Flag.NO_RELIABILITY) .putHeader(gms.getId(), new GMS.GmsHeader(GMS.GmsHeader.LEAVE_RSP)); log.trace("%s: sending LEAVE response to %s", gms.local_addr, address); gms.getDownProtocol().down(new Event(Event.MSG, msg)); } } }
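/*
 * Illustrative sketch (not part of the JGroups sources): handleMembershipChange(...)
 * above first collapses a batch of queued requests into disjoint join/leave/suspect
 * sets before computing a single new view for the whole batch. This stand-alone
 * example reproduces that bookkeeping with simplified, hypothetical types
 * (SimpleRequest, Kind) so the batching step can be run in isolation.
 */
class MembershipBatchSketch {

    enum Kind { JOIN, LEAVE, SUSPECT }

    static final class SimpleRequest {
        final Kind kind;
        final String member;

        SimpleRequest(Kind kind, String member) {
            this.kind = kind;
            this.member = member;
        }
    }

    static void partition(java.util.Collection<SimpleRequest> requests, String localAddr) {
        java.util.Set<String> joiners = new java.util.LinkedHashSet<String>();
        java.util.Set<String> leavers = new java.util.LinkedHashSet<String>();
        java.util.Set<String> suspects = new java.util.LinkedHashSet<String>();
        for (SimpleRequest req : requests) {
            switch (req.kind) {
                case JOIN:
                    joiners.add(req.member);
                    break;
                case LEAVE:
                    leavers.add(req.member);
                    break;
                case SUSPECT:
                    suspects.add(req.member);
                    break;
            }
        }
        joiners.remove(localAddr); // the coordinator never re-adds itself
        System.out.println("join=" + joiners + ", leave=" + leavers + ", suspect=" + suspects);
    }

    public static void main(String[] args) {
        java.util.List<SimpleRequest> batch = java.util.Arrays.asList(
            new SimpleRequest(Kind.JOIN, "B"),
            new SimpleRequest(Kind.SUSPECT, "C"),
            new SimpleRequest(Kind.LEAVE, "D"));
        partition(batch, "A"); // prints join=[B], leave=[D], suspect=[C]
    }
}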
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.www; import java.net.SocketException; import java.util.ArrayList; import java.util.List; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.jdbc.TransDataService; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LoggingObjectInterface; import org.pentaho.di.core.logging.LoggingObjectType; import org.pentaho.di.core.logging.SimpleLoggingObject; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.RepositoryPluginType; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.RepositoriesMeta; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryMeta; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.stores.delegate.DelegatingMetaStore; import org.pentaho.metastore.stores.memory.MemoryMetaStore; import org.pentaho.metastore.stores.xml.XmlMetaStore; import org.w3c.dom.Node; public class SlaveServerConfig { public static final String XML_TAG = "slave_config"; public static final String XML_TAG_MASTERS = "masters"; public static final String XML_TAG_REPOSITORY = "repository"; public static final String XML_TAG_SEQUENCES = "sequences"; public static final String XML_TAG_AUTOSEQUENCE = "autosequence"; public static final String XML_TAG_AUTO_CREATE = "autocreate"; public static final String XML_TAG_SERVICES = "services"; public static final String XML_TAG_SERVICE = "service"; private List<SlaveServer> masters; private SlaveServer slaveServer; private boolean reportingToMasters; private boolean joining; private int maxLogLines; private int maxLogTimeoutMinutes; private int objectTimeoutMinutes; private String filename; private List<DatabaseMeta> databases; private List<SlaveSequence> slaveSequences; private SlaveSequence autoSequence; private boolean automaticCreationAllowed; private List<TransDataService> services; private Repository repository; private RepositoryMeta repositoryMeta; private String repositoryId; private String repositoryUsername; private String repositoryPassword; private DelegatingMetaStore metaStore; private String passwordFile; public SlaveServerConfig() { masters = new 
ArrayList<SlaveServer>();
    databases = new ArrayList<DatabaseMeta>();
    slaveSequences = new ArrayList<SlaveSequence>();
    automaticCreationAllowed = false;
    services = new ArrayList<TransDataService>();
    metaStore = new DelegatingMetaStore();

    // Add the local Pentaho MetaStore to the delegation.
    // This sets it as the active one.
    //
    try {
      XmlMetaStore localStore = new XmlMetaStore( MetaStoreConst.getDefaultPentahoMetaStoreLocation() );
      metaStore.addMetaStore( localStore );
      metaStore.setActiveMetaStoreName( localStore.getName() );
    } catch ( MetaStoreException e ) {
      LogChannel.GENERAL.logError( "Unable to open local Pentaho meta store from ["
        + MetaStoreConst.getDefaultPentahoMetaStoreLocation() + "]", e );
      // now replace this with an in memory metastore.
      //
      try {
        MemoryMetaStore memoryStore = new MemoryMetaStore();
        memoryStore.setName( "Memory metastore" );
        metaStore.addMetaStore( memoryStore );
        metaStore.setActiveMetaStoreName( memoryStore.getName() );
      } catch ( MetaStoreException e2 ) {
        // wrap the exception that actually failed here (e2), not the earlier one
        throw new RuntimeException( "Unable to add a default memory metastore to the delegating store", e2 );
      }
    }
    passwordFile = null; // force lookup by server in ~/.kettle or local folder
  }

  public SlaveServerConfig( SlaveServer slaveServer ) {
    this();
    this.slaveServer = slaveServer;
  }

  public SlaveServerConfig( List<SlaveServer> masters, boolean reportingToMasters, SlaveServer slaveServer ) {
    this.masters = masters;
    this.reportingToMasters = reportingToMasters;
    this.slaveServer = slaveServer;
  }

  public String getXML() {
    StringBuffer xml = new StringBuffer();
    xml.append( XMLHandler.openTag( XML_TAG ) );
    for ( SlaveServer slaveServer : masters ) {
      xml.append( slaveServer.getXML() );
    }
    // XMLHandler.addTagValue() only returns the XML fragment; its result must be
    // appended explicitly or the value is silently dropped from the output
    xml.append( XMLHandler.addTagValue( "report_to_masters", reportingToMasters ) );
    if ( slaveServer != null ) {
      xml.append( slaveServer.getXML() );
    }
    xml.append( XMLHandler.addTagValue( "joining", joining ) );
    xml.append( XMLHandler.addTagValue( "max_log_lines", maxLogLines ) );
    xml.append( XMLHandler.addTagValue( "max_log_timeout_minutes", maxLogTimeoutMinutes ) );
    xml.append( XMLHandler.addTagValue( "object_timeout_minutes", objectTimeoutMinutes ) );
    xml.append( XMLHandler.openTag( XML_TAG_SEQUENCES ) );
    for ( SlaveSequence slaveSequence : slaveSequences ) {
      xml.append( XMLHandler.openTag( SlaveSequence.XML_TAG ) );
      xml.append( slaveSequence.getXML() );
      xml.append( XMLHandler.closeTag( SlaveSequence.XML_TAG ) );
    }
    xml.append( XMLHandler.closeTag( XML_TAG_SEQUENCES ) );
    if ( autoSequence != null ) {
      xml.append( XMLHandler.openTag( XML_TAG_AUTOSEQUENCE ) );
      xml.append( autoSequence.getXML() );
      xml.append( XMLHandler.addTagValue( XML_TAG_AUTO_CREATE, automaticCreationAllowed ) );
      xml.append( XMLHandler.closeTag( XML_TAG_AUTOSEQUENCE ) );
    }
    xml.append( XMLHandler.openTag( XML_TAG_SERVICES ) );
    for ( TransDataService service : services ) {
      xml.append( XMLHandler.openTag( XML_TAG_SERVICE ) );
      xml.append( service.getXML() );
      xml.append( XMLHandler.closeTag( XML_TAG_SERVICE ) );
    }
    xml.append( XMLHandler.closeTag( XML_TAG_SERVICES ) );
    if ( repositoryMeta != null ) {
      xml.append( XMLHandler.openTag( XML_TAG_REPOSITORY ) );
      xml.append( " " ).append( XMLHandler.addTagValue( "id", repositoryMeta.getId() ) );
      xml.append( " " ).append( XMLHandler.addTagValue( "username", repositoryUsername ) );
      xml.append( " " ).append(
        XMLHandler.addTagValue( "password", Encr.encryptPasswordIfNotUsingVariables( repositoryPassword ) ) );
      xml.append( XMLHandler.closeTag( XML_TAG_REPOSITORY ) );
    }
    xml.append( XMLHandler.closeTag( XML_TAG ) );
    return xml.toString();
  }

  public SlaveServerConfig( LogChannelInterface log, Node node ) throws
KettleXMLException { this(); Node mastersNode = XMLHandler.getSubNode( node, XML_TAG_MASTERS ); int nrMasters = XMLHandler.countNodes( mastersNode, SlaveServer.XML_TAG ); for ( int i = 0; i < nrMasters; i++ ) { Node masterSlaveNode = XMLHandler.getSubNodeByNr( mastersNode, SlaveServer.XML_TAG, i ); SlaveServer masterSlaveServer = new SlaveServer( masterSlaveNode ); checkNetworkInterfaceSetting( log, masterSlaveNode, masterSlaveServer ); masters.add( masterSlaveServer ); } reportingToMasters = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "report_to_masters" ) ); Node slaveNode = XMLHandler.getSubNode( node, SlaveServer.XML_TAG ); if ( slaveNode != null ) { slaveServer = new SlaveServer( slaveNode ); checkNetworkInterfaceSetting( log, slaveNode, slaveServer ); } joining = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "joining" ) ); maxLogLines = Const.toInt( XMLHandler.getTagValue( node, "max_log_lines" ), 0 ); maxLogTimeoutMinutes = Const.toInt( XMLHandler.getTagValue( node, "max_log_timeout_minutes" ), 0 ); objectTimeoutMinutes = Const.toInt( XMLHandler.getTagValue( node, "object_timeout_minutes" ), 0 ); // Read sequence information // List<Node> dbNodes = XMLHandler.getNodes( node, DatabaseMeta.XML_TAG ); for ( Node dbNode : dbNodes ) { databases.add( new DatabaseMeta( dbNode ) ); } Node sequencesNode = XMLHandler.getSubNode( node, "sequences" ); List<Node> seqNodes = XMLHandler.getNodes( sequencesNode, SlaveSequence.XML_TAG ); for ( Node seqNode : seqNodes ) { slaveSequences.add( new SlaveSequence( seqNode, databases ) ); } Node autoSequenceNode = XMLHandler.getSubNode( node, XML_TAG_AUTOSEQUENCE ); if ( autoSequenceNode != null ) { autoSequence = new SlaveSequence( autoSequenceNode, databases ); automaticCreationAllowed = "Y".equalsIgnoreCase( XMLHandler.getTagValue( autoSequenceNode, XML_TAG_AUTO_CREATE ) ); } Node servicesNode = XMLHandler.getSubNode( node, XML_TAG_SERVICES ); List<Node> servicesNodes = XMLHandler.getNodes( servicesNode, XML_TAG_SERVICE ); for ( Node serviceNode : servicesNodes ) { TransDataService service = new TransDataService( serviceNode ); services.add( service ); } Node repositoryNode = XMLHandler.getSubNode( node, XML_TAG_REPOSITORY ); repositoryId = XMLHandler.getTagValue( repositoryNode, "name" ); repositoryUsername = XMLHandler.getTagValue( repositoryNode, "username" ); repositoryPassword = XMLHandler.getTagValue( repositoryNode, "password" ); } private void openRepository( String repositoryId ) throws KettleException { try { RepositoriesMeta repositoriesMeta = new RepositoriesMeta(); repositoriesMeta.readData(); repositoryMeta = repositoriesMeta.findRepository( repositoryId ); if ( repositoryMeta == null ) { throw new KettleException( "Unable to find repository: " + repositoryId ); } PluginRegistry registry = PluginRegistry.getInstance(); repository = registry.loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class ); repository.init( repositoryMeta ); repository.connect( repositoryUsername, repositoryPassword ); // Add the repository MetaStore to the delegation as well. 
// Set this one as active with the highest priority // if ( repository.getMetaStore() != null ) { metaStore.addMetaStore( 0, repository.getMetaStore() ); metaStore.setActiveMetaStoreName( repository.getMetaStore().getName() ); } LogChannel.GENERAL.logBasic( "Connected to repository '" + repository.getName() + "'" ); } catch ( Exception e ) { throw new KettleException( "Unable to open repository connection", e ); } } public void readAutoSequences() throws KettleException { if ( autoSequence == null ) { return; } Database database = null; try { DatabaseMeta databaseMeta = autoSequence.getDatabaseMeta(); LoggingObjectInterface loggingInterface = new SimpleLoggingObject( "auto-sequence", LoggingObjectType.GENERAL, null ); database = new Database( loggingInterface, databaseMeta ); database.connect(); String schemaTable = databaseMeta.getQuotedSchemaTableCombination( autoSequence.getSchemaName(), autoSequence.getTableName() ); String seqField = databaseMeta.quoteField( autoSequence.getSequenceNameField() ); String valueField = databaseMeta.quoteField( autoSequence.getValueField() ); String sql = "SELECT " + seqField + ", " + valueField + " FROM " + schemaTable; List<Object[]> rows = database.getRows( sql, 0 ); RowMetaInterface rowMeta = database.getReturnRowMeta(); for ( Object[] row : rows ) { // Automatically create a new sequence for each sequence found... // String sequenceName = rowMeta.getString( row, seqField, null ); if ( !Const.isEmpty( sequenceName ) ) { Long value = rowMeta.getInteger( row, valueField, null ); if ( value != null ) { SlaveSequence slaveSequence = new SlaveSequence( sequenceName, value, databaseMeta, autoSequence.getSchemaName(), autoSequence .getTableName(), autoSequence.getSequenceNameField(), autoSequence.getValueField() ); slaveSequences.add( slaveSequence ); LogChannel.GENERAL.logBasic( "Automatically created slave sequence '" + slaveSequence.getName() + "' with start value " + slaveSequence.getStartValue() ); } } } } catch ( Exception e ) { throw new KettleException( "Unable to automatically configure slave sequences", e ); } finally { if ( database != null ) { database.disconnect(); } } } private void checkNetworkInterfaceSetting( LogChannelInterface log, Node slaveNode, SlaveServer slaveServer ) { // See if we need to grab the network interface to use and then override the host name // String networkInterfaceName = XMLHandler.getTagValue( slaveNode, "network_interface" ); if ( !Const.isEmpty( networkInterfaceName ) ) { // OK, so let's try to get the IP address for this network interface... // try { String newHostname = Const.getIPAddress( networkInterfaceName ); if ( newHostname != null ) { slaveServer.setHostname( newHostname ); // Also change the name of the slave... // slaveServer.setName( slaveServer.getName() + "-" + newHostname ); log.logBasic( "Hostname for slave server [" + slaveServer.getName() + "] is set to [" + newHostname + "], information derived from network " + networkInterfaceName ); } } catch ( SocketException e ) { log.logError( "Unable to get the IP address for network interface " + networkInterfaceName + " for slave server [" + slaveServer.getName() + "]", e ); } } } public SlaveServerConfig( String hostname, int port, boolean joining ) { this(); this.joining = joining; this.slaveServer = new SlaveServer( hostname + ":" + port, hostname, "" + port, null, null ); } /** * @return the list of masters to report back to if the report to masters flag is enabled. 
*/ public List<SlaveServer> getMasters() { return masters; } /** * @param masters * the list of masters to set. It is the list of masters to report back to if the report to masters flag is * enabled. */ public void setMasters( List<SlaveServer> masters ) { this.masters = masters; } /** * @return the slave server.<br> * The user name and password defined in here are used to contact this slave by the masters. */ public SlaveServer getSlaveServer() { return slaveServer; } /** * @param slaveServer * the slave server details to set.<br> * The user name and password defined in here are used to contact this slave by the masters. */ public void setSlaveServer( SlaveServer slaveServer ) { this.slaveServer = slaveServer; } /** * @return true if this slave reports to the masters */ public boolean isReportingToMasters() { return reportingToMasters; } /** * @param reportingToMaster * set to true if this slave should report to the masters */ public void setReportingToMasters( boolean reportingToMaster ) { this.reportingToMasters = reportingToMaster; } /** * @return true if the webserver needs to join with the webserver threads (wait/block until finished) */ public boolean isJoining() { return joining; } /** * @param joining * Set to true if the webserver needs to join with the webserver threads (wait/block until finished) */ public void setJoining( boolean joining ) { this.joining = joining; } /** * @return the maxLogLines */ public int getMaxLogLines() { return maxLogLines; } /** * @param maxLogLines * the maxLogLines to set */ public void setMaxLogLines( int maxLogLines ) { this.maxLogLines = maxLogLines; } /** * @return the maxLogTimeoutMinutes */ public int getMaxLogTimeoutMinutes() { return maxLogTimeoutMinutes; } /** * @param maxLogTimeoutMinutes * the maxLogTimeoutMinutes to set */ public void setMaxLogTimeoutMinutes( int maxLogTimeoutMinutes ) { this.maxLogTimeoutMinutes = maxLogTimeoutMinutes; } /** * @return the objectTimeoutMinutes */ public int getObjectTimeoutMinutes() { return objectTimeoutMinutes; } /** * @param objectTimeoutMinutes * the objectTimeoutMinutes to set */ public void setObjectTimeoutMinutes( int objectTimeoutMinutes ) { this.objectTimeoutMinutes = objectTimeoutMinutes; } /** * @return the filename */ public String getFilename() { return filename; } /** * @param filename * the filename to set */ public void setFilename( String filename ) { this.filename = filename; } /** * @return the databases */ public List<DatabaseMeta> getDatabases() { return databases; } /** * @param databases * the databases to set */ public void setDatabases( List<DatabaseMeta> databases ) { this.databases = databases; } /** * @return the slaveSequences */ public List<SlaveSequence> getSlaveSequences() { return slaveSequences; } /** * @param slaveSequences * the slaveSequences to set */ public void setSlaveSequences( List<SlaveSequence> slaveSequences ) { this.slaveSequences = slaveSequences; } /** * @return the autoSequence */ public SlaveSequence getAutoSequence() { return autoSequence; } /** * @param autoSequence * the autoSequence to set */ public void setAutoSequence( SlaveSequence autoSequence ) { this.autoSequence = autoSequence; } /** * @return the automaticCreationAllowed */ public boolean isAutomaticCreationAllowed() { return automaticCreationAllowed; } /** * @param automaticCreationAllowed * the automaticCreationAllowed to set */ public void setAutomaticCreationAllowed( boolean automaticCreationAllowed ) { this.automaticCreationAllowed = automaticCreationAllowed; } /** * @return the services 
*/ public List<TransDataService> getServices() { return services; } /** * @param services * the services to set */ public void setServices( List<TransDataService> services ) { this.services = services; } /** * @return the repository, loaded lazily */ public Repository getRepository() throws KettleException { if ( !Const.isEmpty( repositoryId ) && repository == null ) { openRepository( repositoryId ); } return repository; } /** * @param repository * the repository to set */ public void setRepository( Repository repository ) { this.repository = repository; } /** * @return the repositoryUsername */ public String getRepositoryUsername() { return repositoryUsername; } /** * @param repositoryUsername * the repositoryUsername to set */ public void setRepositoryUsername( String repositoryUsername ) { this.repositoryUsername = repositoryUsername; } /** * @return the repositoryPassword */ public String getRepositoryPassword() { return repositoryPassword; } /** * @param repositoryPassword * the repositoryPassword to set */ public void setRepositoryPassword( String repositoryPassword ) { this.repositoryPassword = repositoryPassword; } public DelegatingMetaStore getMetaStore() { return metaStore; } public void setMetaStore( DelegatingMetaStore metaStore ) { this.metaStore = metaStore; } public String getPasswordFile() { return passwordFile; } public void setPasswordFile( String passwordFile ) { this.passwordFile = passwordFile; } public String getRepositoryId() { return repositoryId; } public void setRepositoryId( String repositoryId ) { this.repositoryId = repositoryId; } }
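/*
 * Illustrative usage sketch (not part of the Pentaho sources): builds a
 * SlaveServerConfig from an XML fragment using the same XMLHandler helpers the
 * class itself relies on. The XML snippet, including the inner <slaveserver>
 * element names, is a hypothetical example; adjust it to the real slave_config
 * layout in use.
 */
class SlaveServerConfigExample {
  public static void main( String[] args ) throws Exception {
    String xml =
      "<slave_config>"
      + "<slaveserver><name>slave-1</name><hostname>localhost</hostname><port>8081</port></slaveserver>"
      + "<joining>Y</joining>"
      + "<max_log_lines>10000</max_log_lines>"
      + "</slave_config>";

    // loadXMLString parses the document; getSubNode selects the <slave_config> node
    org.w3c.dom.Node configNode = org.pentaho.di.core.xml.XMLHandler.getSubNode(
      org.pentaho.di.core.xml.XMLHandler.loadXMLString( xml ), SlaveServerConfig.XML_TAG );

    SlaveServerConfig config = new SlaveServerConfig(
      new org.pentaho.di.core.logging.LogChannel( "SlaveServerConfigExample" ), configNode );
    System.out.println( "joining=" + config.isJoining() + ", maxLogLines=" + config.getMaxLogLines() );
  }
}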
package TwitterDownload;

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

/**
 * @author Bones
 */
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.*;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Scanner;
import jxl.write.DateTime;
import java.util.Date;

public class DataMethods {

    static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
    static final String DEFAULT_DB_URL = "41.185.26.152";
    static final String DEFAULT_DB_NAME = "dataferret";
    static final String DEFAULT_DB_USERNAME = "checkers";
    static final String DEFAULT_DB_PASSWORD = "Trunkswilltry001!";

    public static Statement getDefaultConnection() throws SQLException, ClassNotFoundException, IOException {
        return getConnection(DEFAULT_DB_URL, DEFAULT_DB_NAME, DEFAULT_DB_USERNAME, DEFAULT_DB_PASSWORD);
    }

    public static Statement getConnection(String dbUrl, String dbName, String dbUsername, String dbPassword)
            throws SQLException, ClassNotFoundException, IOException {
        if(dbUrl == null || dbUrl.length() == 0) {
            dbUrl = DEFAULT_DB_URL;
        }
        if(dbName == null || dbName.length() == 0) {
            dbName = DEFAULT_DB_NAME;
        }
        if(dbUsername == null || dbUsername.length() == 0) {
            dbUsername = DEFAULT_DB_USERNAME;
        }
        if(dbPassword == null || dbPassword.length() == 0) {
            dbPassword = DEFAULT_DB_PASSWORD;
        }

//        String path = "/settings/dataferret.config";
//
//        File test = new File(path);
//        String testPath = test.getAbsolutePath();
//
//        Scanner scanner = new Scanner(new File(path));
//        while(scanner.hasNext()) {
//            String line = scanner.nextLine();
//            int index = line.indexOf(":");
//            String key = line.substring(0, index).trim();
//            String value = line.substring(index + 1, line.length()).trim();
//
//            switch(key) {
//                case "databaseURL":
//                    DB_URL = value;
//                    if(DB_URL.charAt(DB_URL.length() - 1) != '/') {
//                        DB_URL += "/";
//                    }
//                    break;
//                case "databaseName":
//                    DB_Name = value;
//                    break;
//                case "databaseUsername":
//                    DB_Username = value;
//                    break;
//                case "databasePassword":
//                    DB_Password = value;
//                    break;
//            }
//        }
//        scanner.close();

        Class.forName(JDBC_DRIVER);
        // no space before the port: "host :3306" is not a valid JDBC URL
        Connection conn = DriverManager.getConnection(
                "jdbc:mysql://" + dbUrl + ":3306/" + dbName, dbUsername, dbPassword);
        Statement stmt = conn.createStatement();
        return stmt;
    }

    public static ResultSet getUser(String userId) {
        try {
            ResultSet res = getData("SELECT * FROM Users WHERE userId = '" + userId + "'");
            return res;
        }
        catch(Exception ex) {
            logError(ex);
            return null;
        }
    }

    public static int saveUser(long twitterId, String screenName, String userName) {
        ResultSet result = null;
        try {
            int ret = -1;
            result = getData("SELECT userId FROM Users WHERE twitterId = " + twitterId);
            if(result == null || !result.next()) {
                runQuery("INSERT INTO Users (screenName, twitterId, userName) VALUES ('"
                        + screenName + "', " + twitterId + ", '" + userName + "')");
                result = getData("SELECT userId FROM Users WHERE twitterId = '" + twitterId + "'");
                if(result.next())
                    ret = result.getInt("userId");
            }
            else
                ret = result.getInt("userId");
            result.close();
            return ret;
        }
        catch(Exception ex) {
            logError(ex);
            return -1;
        }
    }

    public static void saveLogin(int userId, String ipAddress) {
        try {
            runQuery("INSERT INTO Logins (userId, ipAddress) VALUES ('" + userId + "', '" + ipAddress + "')");
        }
        catch(Exception ex) {
            logError(ex);
        }
    }

    public static void saveDownload(int userId, String filePath, String productId, Double price) {
        ResultSet res;
        try {
            res = getData("SELECT MAX(loginId) FROM Logins WHERE userId = " + userId);
            res.next();
            int loginId = res.getInt("MAX(loginId)");
            runQuery("INSERT INTO Downloads (userId, loginId, product, filePath, paidZAR) VALUES ('"
                    + userId + "', " + loginId + ", " + productId + ", '" + filePath + "', " + price + ")");
            res.close();
        }
        catch(Exception ex) {
            logError(ex);
        }
    }

    public static ResultSet getData(String sql) {
        try {
            Statement stmt = getDefaultConnection();
            ResultSet res = stmt.executeQuery(sql);
            stmt.closeOnCompletion();
            return res;
        }
        catch(Exception ex) {
            logError(ex);
            return null;
        }
    }

    public static ResultSet getDebugData(String sql, String dbUrl, String dbName, String dbUsername,
            String dbPassword) throws IOException, SQLException, ClassNotFoundException {
        Statement stmt = getConnection(dbUrl, dbName, dbUsername, dbPassword);
        ResultSet res = stmt.executeQuery(sql);
        stmt.closeOnCompletion();
        return res;
    }

    public static String getProducts() {
        try {
            Statement stmt = getDefaultConnection();
            ResultSet res = stmt.executeQuery("SELECT * FROM dataferret.Products WHERE active = 1 ORDER BY priceDollars;");
            String prods = "[";
            while(res.next()) {
                prods += "{$id$:" + res.getInt("productId") + ", $name$: $" + res.getString("name")
                        + "$, $price$: " + res.getDouble("priceDollars")
                        + ", $noOfTweets$: " + res.getInt("ammountOfData") + "},";
            }
            // only strip the trailing comma when at least one product was appended
            if(prods.endsWith(",")) {
                prods = prods.substring(0, prods.length() - 1);
            }
            prods += "]";
            res.close();
            stmt.close();
            return prods;
        }
        catch(Exception ex) {
            logError(ex);
            return null;
        }
    }

    public static void runQuery(String sql) {
        try {
            Statement stmt = getDefaultConnection();
            stmt.execute(sql);
            stmt.closeOnCompletion();
        }
        catch(Exception ex) {
            logError(ex);
        }
    }

    public static void logError(Exception ex) {
        try {
            String path = "/log";
            File dir = new File(path);
            if(!dir.exists())
                dir.mkdir();

            path = path + "/dataferret_error.log";
            File file = new File(path);
            if(!file.exists())
                file.createNewFile();

            DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            Date date = new Date();

            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            ex.printStackTrace(pw);

            String error = "Database Error: " + dateFormat.format(date) + ", message: " + sw.toString() + "\r\n";

            FileWriter writer = new FileWriter(new File(path), true);
            writer.append(error);
            writer.flush();
            writer.close();
        }
        catch(IOException e) {
            System.out.println("IOException occurred while trying to log error, message: " + e.getLocalizedMessage());
        }
    }
}
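/*
 * Illustrative sketch (not part of the original class): the methods above build SQL
 * by concatenating caller-supplied values, which is open to SQL injection. A
 * parameterized variant of the getUser(...) lookup could look like this; the
 * Connection argument stands in for however the caller obtains its JDBC connection.
 */
class PreparedStatementSketch {
    static java.sql.ResultSet getUser(java.sql.Connection conn, String userId)
            throws java.sql.SQLException {
        // '?' placeholders make the driver escape the value, so a crafted userId
        // such as "x' OR '1'='1" cannot change the query structure
        java.sql.PreparedStatement ps =
                conn.prepareStatement("SELECT * FROM Users WHERE userId = ?");
        ps.setString(1, userId);
        return ps.executeQuery();
    }
}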
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.reporting.s2s;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.remote.client.SiteToSiteClient;
import org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol;
import org.apache.nifi.remote.protocol.http.HttpProxy;
import org.apache.nifi.remote.util.SiteToSiteRestApiClient;
import org.apache.nifi.reporting.ReportingContext;
import org.apache.nifi.ssl.RestrictedSSLContextService;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.util.StringUtils;

import javax.net.ssl.SSLContext;
import java.util.concurrent.TimeUnit;

public class SiteToSiteUtils {

    public static final PropertyDescriptor DESTINATION_URL = new PropertyDescriptor.Builder()
        .name("Destination URL")
        .displayName("Destination URL")
        .description("The URL of the destination NiFi instance or, if clustered, a comma-separated list of addresses in the format "
            + "of http(s)://host:port/nifi. This destination URL will only be used to initiate the Site-to-Site connection. The "
            + "data sent by this reporting task will be load-balanced across all the nodes of the destination (if clustered).")
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .addValidator(new NiFiUrlValidator())
        .build();

    public static final PropertyDescriptor PORT_NAME = new PropertyDescriptor.Builder()
        .name("Input Port Name")
        .displayName("Input Port Name")
        .description("The name of the Input Port to deliver data to.")
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    public static final PropertyDescriptor SSL_CONTEXT = new PropertyDescriptor.Builder()
        .name("SSL Context Service")
        .displayName("SSL Context Service")
        .description("The SSL Context Service to use when communicating with the destination. "
            + "If not specified, communications will not be secure.")
        .required(false)
        .identifiesControllerService(RestrictedSSLContextService.class)
        .build();

    public static final PropertyDescriptor INSTANCE_URL = new PropertyDescriptor.Builder()
        .name("Instance URL")
        .displayName("Instance URL")
        .description("The URL of this instance to use in the Content URI of each event.")
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .defaultValue("http://${hostname(true)}:8080/nifi")
        .addValidator(new NiFiUrlValidator())
        .build();

    public static final PropertyDescriptor COMPRESS = new PropertyDescriptor.Builder()
        .name("Compress Events")
        .displayName("Compress Events")
        .description("Indicates whether or not to compress the data being sent.")
        .required(true)
        .allowableValues("true", "false")
        .defaultValue("true")
        .build();

    public static final PropertyDescriptor TIMEOUT = new PropertyDescriptor.Builder()
        .name("Communications Timeout")
        .displayName("Communications Timeout")
        .description("Specifies how long to wait for a response from the destination before deciding that an error has occurred "
            + "and canceling the transaction")
        .required(true)
        .defaultValue("30 secs")
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .build();

    public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
        .name("Batch Size")
        .displayName("Batch Size")
        .description("Specifies how many records to send in a single batch, at most.")
        .required(true)
        .defaultValue("1000")
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .build();

    public static final PropertyDescriptor TRANSPORT_PROTOCOL = new PropertyDescriptor.Builder()
        .name("s2s-transport-protocol")
        .displayName("Transport Protocol")
        .description("Specifies which transport protocol to use for Site-to-Site communication.")
        .required(true)
        .allowableValues(SiteToSiteTransportProtocol.values())
        .defaultValue(SiteToSiteTransportProtocol.RAW.name())
        .build();

    public static final PropertyDescriptor HTTP_PROXY_HOSTNAME = new PropertyDescriptor.Builder()
        .name("s2s-http-proxy-hostname")
        .displayName("HTTP Proxy hostname")
        .description("Specify the proxy server's hostname to use. If not specified, HTTP traffic is sent directly "
            + "to the target NiFi instance.")
        .required(false)
        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
        .build();

    public static final PropertyDescriptor HTTP_PROXY_PORT = new PropertyDescriptor.Builder()
        .name("s2s-http-proxy-port")
        .displayName("HTTP Proxy port")
        .description("Specify the proxy server's port number, optional. If not specified, the default port 80 will be used.")
        .required(false)
        .addValidator(StandardValidators.PORT_VALIDATOR)
        .build();

    public static final PropertyDescriptor HTTP_PROXY_USERNAME = new PropertyDescriptor.Builder()
        .name("s2s-http-proxy-username")
        .displayName("HTTP Proxy username")
        .description("Specify a user name to connect to the proxy server, optional.")
        .required(false)
        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
        .build();

    public static final PropertyDescriptor HTTP_PROXY_PASSWORD = new PropertyDescriptor.Builder()
        .name("s2s-http-proxy-password")
        .displayName("HTTP Proxy password")
        .description("Specify a user password to connect to the proxy server, optional.")
        .required(false)
        .sensitive(true)
        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
        .build();

    public static final PropertyDescriptor PLATFORM = new PropertyDescriptor.Builder()
        .name("Platform")
        .description("The value to use for the platform field in each event.")
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .defaultValue("nifi")
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    public static SiteToSiteClient getClient(ReportingContext reportContext, ComponentLog logger) {
        final SSLContextService sslContextService = reportContext.getProperty(SiteToSiteUtils.SSL_CONTEXT).asControllerService(SSLContextService.class);
        final SSLContext sslContext = sslContextService == null ? null : sslContextService.createSSLContext(SSLContextService.ClientAuth.REQUIRED);
        final EventReporter eventReporter = (EventReporter) (severity, category, message) -> {
            switch (severity) {
                case WARNING:
                    logger.warn(message);
                    break;
                case ERROR:
                    logger.error(message);
                    break;
                default:
                    break;
            }
        };
        final String destinationUrl = reportContext.getProperty(SiteToSiteUtils.DESTINATION_URL).evaluateAttributeExpressions().getValue();

        final SiteToSiteTransportProtocol mode = SiteToSiteTransportProtocol.valueOf(reportContext.getProperty(SiteToSiteUtils.TRANSPORT_PROTOCOL).getValue());
        final HttpProxy httpProxy = mode.equals(SiteToSiteTransportProtocol.RAW)
            || StringUtils.isEmpty(reportContext.getProperty(SiteToSiteUtils.HTTP_PROXY_HOSTNAME).getValue()) ?
null : new HttpProxy(reportContext.getProperty(SiteToSiteUtils.HTTP_PROXY_HOSTNAME).getValue(), reportContext.getProperty(SiteToSiteUtils.HTTP_PROXY_PORT).asInteger(), reportContext.getProperty(SiteToSiteUtils.HTTP_PROXY_USERNAME).getValue(), reportContext.getProperty(SiteToSiteUtils.HTTP_PROXY_PASSWORD).getValue()); return new SiteToSiteClient.Builder() .urls(SiteToSiteRestApiClient.parseClusterUrls(destinationUrl)) .portName(reportContext.getProperty(SiteToSiteUtils.PORT_NAME).getValue()) .useCompression(reportContext.getProperty(SiteToSiteUtils.COMPRESS).asBoolean()) .eventReporter(eventReporter) .sslContext(sslContext) .timeout(reportContext.getProperty(SiteToSiteUtils.TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS) .transportProtocol(mode) .httpProxy(httpProxy) .stateManager(reportContext.getStateManager()) .build(); } public static class NiFiUrlValidator implements Validator { @Override public ValidationResult validate(final String subject, final String input, final ValidationContext context) { final String value = context.newPropertyValue(input).evaluateAttributeExpressions().getValue(); try { SiteToSiteRestApiClient.parseClusterUrls(value); return new ValidationResult.Builder() .input(input) .subject(subject) .valid(true) .build(); } catch (IllegalArgumentException ex) { return new ValidationResult.Builder() .input(input) .subject(subject) .valid(false) .explanation(ex.getLocalizedMessage()) .build(); } } } }
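/*
 * Illustrative usage sketch (not part of the original utility): how a reporting
 * task might push a payload with the client returned by SiteToSiteUtils.getClient(...).
 * Assumes the standard nifi-site-to-site-client Transaction API; the attribute
 * key/value pair is a hypothetical example and error handling is reduced to the
 * happy path.
 */
class SiteToSiteSendSketch {
    static void send(org.apache.nifi.remote.client.SiteToSiteClient client, byte[] payload)
            throws java.io.IOException {
        final org.apache.nifi.remote.Transaction transaction =
            client.createTransaction(org.apache.nifi.remote.TransferDirection.SEND);
        final java.util.Map<String, String> attributes = new java.util.HashMap<>();
        attributes.put("reporting.task", "example"); // hypothetical flowfile attribute
        transaction.send(payload, attributes); // queue one data packet
        transaction.confirm();                 // exchange CRC with the remote side
        transaction.complete();                // commit the transfer
    }
}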
/** * */ package edu.ucdenver.ccp.datasource.fileparsers.jaxb; /* * #%L * Colorado Computational Pharmacology's common module * %% * Copyright (C) 2012 - 2015 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * #L% */ import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.HashSet; import java.util.NoSuchElementException; import java.util.Set; import java.util.zip.GZIPInputStream; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.stream.EventFilter; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; import edu.ucdenver.ccp.datasource.fileparsers.FileRecord; import edu.ucdenver.ccp.datasource.fileparsers.FileRecordReader; import edu.ucdenver.ccp.datasource.identifiers.impl.bio.NcbiTaxonomyID; /** * @author Colorado Computational Pharmacology, UC Denver; * [email protected] * @param <T> * */ public abstract class XmlFileRecordReader<T extends FileRecord> extends FileRecordReader<T> { private XMLEventReader xmlfer; private XMLEventReader xmler; private InputStream is; private JAXBContext ctx; private Unmarshaller um; private T nextRecord = null; private final Set<NcbiTaxonomyID> taxonsOfInterest; private final Class<?> entryClass; private final Set<String> excludeElements; // public XmlFileRecordReader(File workDirectory, boolean clean) throws // IOException { // this(workDirectory, clean, null); // } public XmlFileRecordReader(Class<?> entryClass, File workDirectory, boolean clean, Set<NcbiTaxonomyID> taxonIds) throws IOException { this(entryClass, workDirectory, clean, taxonIds, new HashSet<String>()); } public XmlFileRecordReader(Class<?> entryClass, File workDirectory, boolean clean, Set<NcbiTaxonomyID> taxonIds, Set<String> excludeElements) throws IOException 
{ super(workDirectory, null, null, null, null, clean); this.entryClass = entryClass; this.taxonsOfInterest = taxonIds; this.excludeElements = excludeElements; try { initialize(initializeInputStreamFromDownload()); } catch (XMLStreamException e) { throw new RuntimeException(e); } catch (JAXBException e) { throw new RuntimeException(e); } } // public XmlFileRecordReader(File dataFile) throws IOException { // this(dataFile, null); // } public XmlFileRecordReader(Class<?> entryClass, File dataFile, Set<NcbiTaxonomyID> taxonIds) throws IOException { this(entryClass, dataFile, taxonIds, new HashSet<String>()); } /** * @param dataFile * @param encoding * @param skipLinePrefix * @throws IOException */ public XmlFileRecordReader(Class<?> entryClass, File dataFile, Set<NcbiTaxonomyID> taxonIds, Set<String> excludeElements) throws IOException { super(dataFile, null, null); this.entryClass = entryClass; this.taxonsOfInterest = taxonIds; this.excludeElements = excludeElements; try { InputStream is; if (dataFile.getName().endsWith(".gz")) { is = new GZIPInputStream(new FileInputStream(dataFile)); } else { is = new FileInputStream(dataFile); } initialize(is); } catch (XMLStreamException e) { throw new RuntimeException(e); } catch (JAXBException e) { throw new RuntimeException(e); } } protected InputStream initializeInputStreamFromDownload() throws IOException { throw new UnsupportedOperationException( "The initializeInputStreamFromDownload() method is designed to be used " + "when a subclass of this class is automatically obtaining the input file. The subclass should initialize " + "the InputStream that will serve the UniProt XML to the XML parsing code."); } /** * @param dataFile * @throws FileNotFoundException * @throws XMLStreamException * @throws JAXBException */ private void initialize(InputStream is) throws FileNotFoundException, XMLStreamException, JAXBException { this.is = is; ctx = JAXBContext.newInstance(entryClass); um = ctx.createUnmarshaller(); XMLInputFactory xmlif = XMLInputFactory.newInstance(); xmler = xmlif.createXMLEventReader(is); EventFilter filter = new EventFilter() { public boolean accept(XMLEvent event) { return event.isStartElement(); } }; xmlfer = xmlif.createFilteredReader(xmler, filter); // Jump to the first element in the document, the enclosing Uniprot in // the case of uniprot // xml StartElement e = (StartElement) xmlfer.nextEvent(); advanceToRecordWithTaxonOfInterest(); } private void advanceToRecordWithTaxonOfInterest() { if (hasNext()) { while (nextRecord != null && !hasTaxonOfInterest(nextRecord)) { next(); hasNext(); } } } /** * @param nextRecord2 * @return */ protected abstract boolean hasTaxonOfInterest(T record); protected Set<NcbiTaxonomyID> getTaxonsOfInterest() { if (taxonsOfInterest == null) { return null; } return new HashSet<NcbiTaxonomyID>(taxonsOfInterest); } /* * (non-Javadoc) * * @see java.io.Closeable#close() */ @Override public void close() throws IOException { is.close(); } /* * (non-Javadoc) * * @see java.util.Iterator#hasNext() */ @Override public boolean hasNext() { if (nextRecord == null) { try { while (xmlfer.peek() != null) { JAXBElement<?> unmarshalledElement = um.unmarshal(xmler,entryClass); if (excludeElements.contains(unmarshalledElement.getName().getLocalPart())) { continue; } Object o = unmarshalledElement.getValue(); if (entryClass.isInstance(o)) { nextRecord = initializeNewRecord(entryClass.cast(o)); if (hasTaxonOfInterest(nextRecord)) { return true; } nextRecord = null; } } return false; } catch (JAXBException e) { throw new 
RuntimeException(e); } catch (XMLStreamException e) { throw new RuntimeException(e); } } return true; } /** * @param entryXmlClassInstance * an instance of the class that is the "entry" into the XML, i.e. the thing you * want to iterate over * @return */ protected abstract T initializeNewRecord(Object entryXmlClassInstance); // protected UniProtFileRecord initializeNewRecord(Entry entry) { // return new UniProtFileRecord(entry); // } /* * (non-Javadoc) * * @see java.util.Iterator#next() */ @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(); } T record = nextRecord; nextRecord = null; return record; } }
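/*
 * Illustrative sketch (not part of the original sources): a minimal concrete
 * subclass showing how the two abstract hooks cooperate. ExampleEntry (a JAXB
 * "entry" class) and ExampleRecord (its FileRecord wrapper, assumed to expose a
 * getTaxonomyId() accessor) are hypothetical stand-ins whose definitions are
 * omitted here.
 */
class ExampleXmlRecordReader extends XmlFileRecordReader<ExampleRecord> {

	public ExampleXmlRecordReader(File dataFile, Set<NcbiTaxonomyID> taxonIds) throws IOException {
		// tells the base class which JAXB-bound element type to iterate over
		super(ExampleEntry.class, dataFile, taxonIds);
	}

	@Override
	protected ExampleRecord initializeNewRecord(Object entryXmlClassInstance) {
		// wrap each unmarshalled entry in the FileRecord type this reader returns
		return new ExampleRecord((ExampleEntry) entryXmlClassInstance);
	}

	@Override
	protected boolean hasTaxonOfInterest(ExampleRecord record) {
		Set<NcbiTaxonomyID> taxa = getTaxonsOfInterest();
		// a null filter means "keep every record"
		return taxa == null || taxa.contains(record.getTaxonomyId());
	}
}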
/* * Copyright [2006] PurePerfect.com * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. * * See the License for the specific language governing permissions * and limitations under the License. */ package com.pureperfect.purview; import java.lang.annotation.Annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Method; import junit.framework.TestCase; import com.pureperfect.purview.util.GetterMethodFilter; import com.pureperfect.purview.util.MakeAccessibleFieldFilter; import com.pureperfect.purview.validators.NoValidation; import com.pureperfect.purview.validators.NotNull; /** * Unit test for {@link Purview}. * * @author J. Chris Folsom * @version 1.1 * @since 1.0 */ public class PurviewTest extends TestCase { /** * Stub class for testing. */ public class FieldStub { @SuppressWarnings("unused") @NotNull private String name; public void setName(final String name) { this.name = name; } } /** * Stub class for testing. */ public class FieldStubStrictFail { public String name; } /** * Stub class for testing. */ public class FieldStubStrictPass { @NoValidation public String name; } /** * Stub class for testing. */ public class MethodStub { private String name; @NotNull public String getName() { return this.name; } public void setName(final String name) { this.name = name; } } /** * Stub class for testing. */ public class MethodStubStrictFail { private String name; public String getName() { return this.name; } public void setName(final String name) { this.name = name; } } /** * Stub class for testing. */ public class MethodStubStrictPass { private String name; @NoValidation public String getName() { return this.name; } public void setName(final String name) { this.name = name; } } /** * Stub class for testing. */ @Retention(RetentionPolicy.RUNTIME) public @interface MockTypeValidator { @SuppressWarnings( { "unchecked", "rawtypes" }) public class ValidatorImpl implements Validator<ValidationProblem, Object, Annotation, Object, Object> { public ValidationProblem validate(final Object instance, final Annotation annotation, final Object target, final Object value) { return new ValidationProblem(null, null, null, null) { // mock }; } } Class<?> validator() default ValidatorImpl.class; } /** * Stub class for testing. */ @NonValidationAnnotation public class NonValidatedType { @NonValidationAnnotation public String getName() { return "chris"; } } /** * Stub class for testing. */ @Retention(RetentionPolicy.RUNTIME) @Target( { ElementType.METHOD, ElementType.TYPE }) public @interface NonValidationAnnotation { // do nothing } /** * Stub class for testing. */ public class ParamStub { public final void testMe(@NotNull final String name) { // do nothing } } /** * Stub class for testing. */ public class ParamStubStrictFail { public final void testMe(final String name) { // do nothing } } /** * Stub class for testing. */ public class ParamStubStrictPass { public final void testMe(@NoValidation final String name) { // do nothing } } /** * Stub class for testing. 
*/ @MockTypeValidator public class TypeStubFail { // blah blah } /** * Stub class for testing. */ public class TypeStubPass { // blah blah } /** * Stub class for testing. */ public class TypeStubStrictFail { // blah blah } /** * Stub class for testing. */ @NoValidation public class TypeStubStrictPass { // blah blah } /** * Test validate fields. */ public void testValidateFields() { final FieldStub mockType = new FieldStub(); assertEquals(1, Purview.validateFields(mockType).getProblems().size()); mockType.setName("name"); assertEquals(0, Purview.validateFields(mockType).getProblems().size()); } /** * Test validate fields with a custom filter. */ public void testValidateFieldsCustomFilter() { final FieldStub mockType = new FieldStub(); assertEquals( 1, Purview.validateFields(mockType, MakeAccessibleFieldFilter.defaultInstance()) .getProblems().size()); mockType.setName("name"); assertEquals( 0, Purview.validateFields(mockType, MakeAccessibleFieldFilter.defaultInstance()) .getProblems().size()); } /** * Test strict mode for validate fields. */ public void testValidateFieldsStrict() { final FieldStub mockType = new FieldStub(); assertEquals(1, Purview.validateFields(mockType).getProblems().size()); mockType.setName("name"); assertEquals(0, Purview.validateFields(mockType).getProblems().size()); // Test missing NoValidation annotation. final FieldStubStrictFail fail = new FieldStubStrictFail(); try { Purview.validateFields(fail, true); fail("Should have thrown exception"); } catch (final ValidationException e) { // should have thrown exception System.err.println(e.getMessage()); } // Test NoValidation annotation final FieldStubStrictPass pass = new FieldStubStrictPass(); assertEquals(0, Purview.validateFields(pass, true).getProblems().size()); } /** * Test strict mode for validate fields with a custom filter. */ public void testValidateFieldsStrictCustomFilter() { final FieldStub mockType = new FieldStub(); assertEquals(1, Purview.validateFields(mockType).getProblems().size()); mockType.setName("name"); assertEquals(0, Purview.validateFields(mockType).getProblems().size()); // Test missing NoValidation annotation. final FieldStubStrictFail fail = new FieldStubStrictFail(); try { Purview.validateFields(fail, MakeAccessibleFieldFilter.defaultInstance(), true); fail("Should have thrown exception"); } catch (final ValidationException e) { // should have thrown exception System.err.println(e.getMessage()); } // Test NoValidation annotation final FieldStubStrictPass pass = new FieldStubStrictPass(); assertEquals( 0, Purview.validateFields(pass, MakeAccessibleFieldFilter.defaultInstance(), true) .getProblems().size()); } /** * Test validate methods. */ public void testValidateMethods() { final MethodStub mockType = new MethodStub(); assertEquals(1, Purview.validateMethods(mockType).getProblems().size()); mockType.setName("name"); assertEquals(0, Purview.validateMethods(mockType).getProblems().size()); } /** * Test validate methods with a custom filter. */ public void testValidateMethodsCustomFilter() { final MethodStub mockType = new MethodStub(); assertEquals( 1, Purview.validateMethods(mockType, GetterMethodFilter.defaultInstance()).getProblems() .size()); mockType.setName("name"); assertEquals( 0, Purview.validateMethods(mockType, GetterMethodFilter.defaultInstance()).getProblems() .size()); } /** * Test validate methods in strict mode. 
*/ public void testValidateMethodsStrict() { // Test normal final MethodStub mockType = new MethodStub(); assertEquals(1, Purview.validateMethods(mockType, true).getProblems() .size()); mockType.setName("name"); assertEquals(0, Purview.validateMethods(mockType, true).getProblems() .size()); // Test strict mode missing annotation final MethodStubStrictFail fail = new MethodStubStrictFail(); try { Purview.validateMethods(fail, true); fail("Should have thrown validation exception"); } catch (final ValidationException e) { // Okay. Strict mode is on so it should have failed. System.err.println(e.getMessage()); } final MethodStubStrictPass pass = new MethodStubStrictPass(); assertEquals(0, Purview.validateMethods(pass, true).getProblems() .size()); } /** * Test validate methods in strict mode with a custom filter. */ public void testValidateMethodsStrictCustomFilter() { // Test normal final MethodStub mockType = new MethodStub(); assertEquals( 1, Purview.validateMethods(mockType, GetterMethodFilter.defaultInstance(), true) .getProblems().size()); mockType.setName("name"); assertEquals( 0, Purview.validateMethods(mockType, GetterMethodFilter.defaultInstance(), true) .getProblems().size()); // Test strict mode missing annotation final MethodStubStrictFail fail = new MethodStubStrictFail(); try { Purview.validateMethods(fail, GetterMethodFilter.defaultInstance(), true); fail("Should have thrown validation exception"); } catch (final ValidationException e) { // Okay. Strict mode is on so it should have failed. System.err.println(e.getMessage()); } final MethodStubStrictPass pass = new MethodStubStrictPass(); assertEquals( 0, Purview.validateMethods(pass, GetterMethodFilter.defaultInstance(), true) .getProblems().size()); } /** * Test validate parameters. */ @SuppressWarnings("rawtypes") public void testValidateParameters() throws SecurityException, NoSuchMethodException { final ParamStub mock = new ParamStub(); final Class[] paramTypes = new Class[] { String.class }; final Method method = mock.getClass().getMethod("testMe", paramTypes); assertEquals(1, Purview.validateParameters(mock, method, new Object[] { null }).getProblems().size()); assertEquals(0, Purview.validateParameters(mock, method, new Object[] { "should pass" }).getProblems().size()); } /** * Test validate parameters in strict mode. */ @SuppressWarnings("rawtypes") public void testValidateParametersStrict() throws SecurityException, NoSuchMethodException { final ParamStubStrictPass pass = new ParamStubStrictPass(); final Class[] paramTypes = new Class[] { String.class }; final Method passMethod = pass.getClass().getMethod("testMe", paramTypes); assertEquals(0, Purview.validateParameters(pass, passMethod, new Object[] { null }, true).getProblems().size()); final ParamStubStrictFail fail = new ParamStubStrictFail(); final Method failMethod = fail.getClass().getMethod("testMe", paramTypes); try { Purview.validateParameters(fail, failMethod, new Object[] { null }, true); fail("Should have thrown an exception"); } catch (final ValidationException e) { // should throw exception System.err.println(e.getMessage()); } } /** * Test validate type. */ public void testValidateType() { final Object pass = new TypeStubPass(); assertEquals(0, Purview.validateType(pass).getProblems().size()); final Object fail = new TypeStubFail(); assertEquals(1, Purview.validateType(fail).getProblems().size()); } /** * Test validate type in strict mode. 
*/ public void testValidateTypeStrict() { final Object pass = new TypeStubStrictPass(); assertEquals(0, Purview.validateType(pass, true).getProblems().size()); final Object fail = new TypeStubStrictFail(); try { Purview.validateType(fail, true); fail("Should have thrown exception"); } catch (final ValidationException e) { // should have thrown exception System.err.println(e.getMessage()); } } }
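/*
 * A minimal usage sketch (hypothetical, not part of the Purview sources): how the API
 * exercised by PurviewTest above might look in application code. It assumes only what
 * the tests demonstrate: Purview.validateMethods(Object) returns a result whose
 * getProblems() collection is empty when validation passes, and @NotNull marks a getter
 * that must not return null. The Customer class and main method are illustrative only.
 */
package com.pureperfect.purview;

import com.pureperfect.purview.validators.NotNull;

public class PurviewUsageSketch
{
	/** Hypothetical domain object validated through its getters. */
	public static class Customer
	{
		private String email;

		@NotNull
		public String getEmail()
		{
			return this.email;
		}

		public void setEmail(final String email)
		{
			this.email = email;
		}
	}

	public static void main(final String[] args)
	{
		final Customer customer = new Customer();

		// email is null, so exactly one problem is reported (cf. testValidateMethods).
		System.out.println(Purview.validateMethods(customer).getProblems().size()); // 1

		customer.setEmail("[email protected]");
		System.out.println(Purview.validateMethods(customer).getProblems().size()); // 0
	}
}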
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * created at Sep 11, 2001 * @author Jeka */ package com.intellij.refactoring.move.moveMembers; import com.intellij.codeInsight.highlighting.ReadWriteAccessDetector; import com.intellij.lang.LanguageExtension; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Ref; import com.intellij.psi.*; import com.intellij.psi.impl.source.resolve.JavaResolveUtil; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.MethodSignatureUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.BaseRefactoringProcessor; import com.intellij.refactoring.HelpID; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.refactoring.move.MoveCallback; import com.intellij.refactoring.move.MoveHandler; import com.intellij.refactoring.move.MoveMemberViewDescriptor; import com.intellij.refactoring.util.*; import com.intellij.usageView.UsageInfo; import com.intellij.usageView.UsageViewDescriptor; import com.intellij.usageView.UsageViewUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.VisibilityUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NotNull; import java.util.*; public class MoveMembersProcessor extends BaseRefactoringProcessor { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.move.moveMembers.MoveMembersProcessor"); private PsiClass myTargetClass; private final Set<PsiMember> myMembersToMove = new LinkedHashSet<PsiMember>(); private final MoveCallback myMoveCallback; private String myNewVisibility; // "null" means "as is" private String myCommandName = MoveMembersImpl.REFACTORING_NAME; private boolean myMakeEnumConstant; private MoveMembersOptions myOptions; public MoveMembersProcessor(Project project, MoveCallback moveCallback, MoveMembersOptions options) { super(project); myMoveCallback = moveCallback; setOptions(options); } public MoveMembersProcessor(Project project, MoveMembersOptions options) { this(project, null, options); } protected String getCommandName() { return myCommandName; } private void setOptions(MoveMembersOptions dialog) { myOptions = dialog; PsiMember[] members = dialog.getSelectedMembers(); myMembersToMove.clear(); myMembersToMove.addAll(Arrays.asList(members)); setCommandName(members); final PsiManager manager = PsiManager.getInstance(myProject); myTargetClass = JavaPsiFacade.getInstance(manager.getProject()).findClass(dialog.getTargetClassName(), GlobalSearchScope.projectScope(myProject)); myNewVisibility = dialog.getMemberVisibility(); myMakeEnumConstant = dialog.makeEnumConstant(); } private void setCommandName(final PsiMember[] members) { StringBuilder commandName 
= new StringBuilder(); commandName.append(MoveHandler.REFACTORING_NAME); commandName.append(" "); boolean first = true; for (PsiMember member : members) { if (!first) commandName.append(", "); commandName.append(UsageViewUtil.getType(member)); commandName.append(' '); commandName.append(UsageViewUtil.getShortName(member)); first = false; } myCommandName = commandName.toString(); } protected UsageViewDescriptor createUsageViewDescriptor(UsageInfo[] usages) { return new MoveMemberViewDescriptor(myMembersToMove.toArray(new PsiElement[myMembersToMove.size()])); } @NotNull protected UsageInfo[] findUsages() { final List<UsageInfo> usagesList = new ArrayList<UsageInfo>(); for (PsiMember member : myMembersToMove) { for (PsiReference psiReference : ReferencesSearch.search(member)) { PsiElement ref = psiReference.getElement(); final MoveMemberHandler handler = MoveMemberHandler.EP_NAME.forLanguage(ref.getLanguage()); MoveMembersUsageInfo usage = null; if (handler != null) { usage = handler.getUsage(member, psiReference, myMembersToMove, myTargetClass); } if (usage != null) { usagesList.add(usage); } else { if (!isInMovedElement(ref)) { usagesList.add(new MoveMembersUsageInfo(member, ref, null, ref, psiReference)); } } } } UsageInfo[] usageInfos = usagesList.toArray(new UsageInfo[usagesList.size()]); usageInfos = UsageViewUtil.removeDuplicatedUsages(usageInfos); return usageInfos; } protected void refreshElements(PsiElement[] elements) { LOG.assertTrue(myMembersToMove.size() == elements.length); myMembersToMove.clear(); for (PsiElement resolved : elements) { myMembersToMove.add((PsiMember)resolved); } } private boolean isInMovedElement(PsiElement element) { for (PsiMember member : myMembersToMove) { if (PsiTreeUtil.isAncestor(member, element, false)) return true; } return false; } protected void performRefactoring(final UsageInfo[] usages) { try { // correct references to moved members from the outside LanguageExtension<MoveMemberHandler> extension=new LanguageExtension<MoveMemberHandler>("com.intellij.refactoring.moveMemberHandler"); PsiClass targetClass = JavaPsiFacade.getInstance(myProject) .findClass(myOptions.getTargetClassName(), GlobalSearchScope.projectScope(myProject)); if (targetClass == null) return; final Map<PsiMember, PsiElement> anchors = new HashMap<PsiMember, PsiElement>(); for (PsiMember member : myMembersToMove) { anchors.put(member, extension.forLanguage(member.getLanguage()).getAnchor(member, targetClass)); } ArrayList<MoveMembersUsageInfo> otherUsages = new ArrayList<MoveMembersUsageInfo>(); for (UsageInfo usageInfo : usages) { MoveMembersUsageInfo usage = (MoveMembersUsageInfo)usageInfo; if (!usage.reference.isValid()) continue; final MoveMemberHandler handler = extension.forLanguage(usageInfo.getElement().getLanguage()); if (handler!=null) { if (handler.changeExternalUsage(myOptions, usage)) continue; } otherUsages.add(usage); } // correct references inside moved members and outer references to Inner Classes for (PsiMember member : myMembersToMove) { ArrayList<PsiReference> refsToBeRebind = new ArrayList<PsiReference>(); for (Iterator<MoveMembersUsageInfo> iterator = otherUsages.iterator(); iterator.hasNext();) { MoveMembersUsageInfo info = iterator.next(); if (member.equals(info.member)) { PsiReference ref = info.getReference(); if (ref != null) { refsToBeRebind.add(ref); } iterator.remove(); } } final RefactoringElementListener elementListener = getTransaction().getElementListener(member); final MoveMemberHandler handler = extension.forLanguage(member.getLanguage()); 
PsiMember newMember = handler.doMove(myOptions, member, anchors.get(member), targetClass); elementListener.elementMoved(newMember); fixVisibility(newMember, usages); for (PsiReference reference : refsToBeRebind) { reference.bindToElement(newMember); } } // qualifier info must be decoded after members are moved //ChangeContextUtil.decodeContextInfo(myTargetClass, null, null); final MoveMemberHandler handler = MoveMemberHandler.EP_NAME.forLanguage(myTargetClass.getLanguage()); if (handler != null) handler.decodeContextInfo(myTargetClass); myMembersToMove.clear(); if (myMoveCallback != null) { myMoveCallback.refactoringCompleted(); } } catch (IncorrectOperationException e) { LOG.error(e); } } private void fixVisibility(PsiMember newMember, final UsageInfo[] usages) throws IncorrectOperationException { PsiModifierList modifierList = newMember.getModifierList(); if (myTargetClass.isInterface()) { modifierList.setModifierProperty(PsiModifier.PUBLIC, false); modifierList.setModifierProperty(PsiModifier.PROTECTED, false); modifierList.setModifierProperty(PsiModifier.PRIVATE, false); return; } if (myNewVisibility == null) return; if (VisibilityUtil.ESCALATE_VISIBILITY.equals(myNewVisibility)) { for (UsageInfo usage : usages) { if (usage instanceof MoveMembersUsageInfo) { final PsiElement place = usage.getElement(); if (place != null) { VisibilityUtil.escalateVisibility(newMember, place); } } } } else { RefactoringConflictsUtil.setVisibility(modifierList, myNewVisibility); } } protected boolean preprocessUsages(Ref<UsageInfo[]> refUsages) { final MultiMap<PsiElement, String> conflicts = new MultiMap<PsiElement, String>(); final UsageInfo[] usages = refUsages.get(); try { addInaccessibleConflicts(conflicts, usages); } catch (IncorrectOperationException e) { LOG.error(e); } analyzeMoveConflicts(myMembersToMove, myTargetClass, myNewVisibility, conflicts); RefactoringConflictsUtil.analyzeModuleConflicts(myProject, myMembersToMove, usages, myTargetClass, conflicts); return showConflicts(conflicts); } private void addInaccessibleConflicts(final MultiMap<PsiElement, String> conflicts, final UsageInfo[] usages) throws IncorrectOperationException { String newVisibility = myNewVisibility; if (VisibilityUtil.ESCALATE_VISIBILITY.equals(newVisibility)) { //Still need to check for access object newVisibility = PsiModifier.PUBLIC; } Map<PsiMember, PsiModifierList> modifierListCopies = new HashMap<PsiMember, PsiModifierList>(); for (PsiMember member : myMembersToMove) { PsiModifierList copy = member.getModifierList(); if (copy != null) copy = (PsiModifierList)copy.copy(); if (newVisibility != null) { if (copy != null) RefactoringConflictsUtil.setVisibility(copy, newVisibility); } modifierListCopies.put(member, copy); } for (UsageInfo usage : usages) { if (usage instanceof MoveMembersUsageInfo) { final MoveMembersUsageInfo usageInfo = (MoveMembersUsageInfo)usage; PsiElement element = usage.getElement(); if (element != null) { final PsiMember member = usageInfo.member; if (element instanceof PsiReferenceExpression) { PsiExpression qualifier = ((PsiReferenceExpression)element).getQualifierExpression(); PsiClass accessObjectClass = null; if (qualifier != null) { accessObjectClass = (PsiClass)PsiUtil.getAccessObjectClass(qualifier).getElement(); } if (!JavaResolveUtil.isAccessible(member, myTargetClass, modifierListCopies.get(member), element, accessObjectClass, null)) { newVisibility = newVisibility == null ?
VisibilityUtil.getVisibilityStringToDisplay(member) : newVisibility; String message = CommonRefactoringUtil.capitalize(RefactoringBundle.message("0.with.1.visibility.is.not.accesible.from.2", RefactoringUIUtil.getDescription(member, false), newVisibility, RefactoringUIUtil.getDescription(ConflictsUtil.getContainer(element), true))); conflicts.putValue(member, message); } } if (member instanceof PsiField && myTargetClass.isInterface()) { final ReadWriteAccessDetector accessDetector = ReadWriteAccessDetector.findDetector(member); if (accessDetector != null) { final ReadWriteAccessDetector.Access access = accessDetector.getExpressionAccess(element); if (access != ReadWriteAccessDetector.Access.Read) { conflicts.putValue(element, CommonRefactoringUtil.capitalize(RefactoringUIUtil.getDescription(member, true)) + " has write access but is moved to an interface"); } } } } } } } public void doRun() { if (myMembersToMove.isEmpty()){ String message = RefactoringBundle.message("no.members.selected"); CommonRefactoringUtil.showErrorMessage(MoveMembersImpl.REFACTORING_NAME, message, HelpID.MOVE_MEMBERS, myProject); return; } super.doRun(); } private static void analyzeMoveConflicts(@NotNull Set<PsiMember> membersToMove, final PsiClass targetClass, final String newVisibility, MultiMap<PsiElement, String> conflicts) { for (final PsiMember member : membersToMove) { if (member instanceof PsiMethod) { PsiMethod method = (PsiMethod)member; if (hasMethod(targetClass, method)) { String message = RefactoringBundle.message("0.already.exists.in.the.target.class", RefactoringUIUtil.getDescription(method, false)); message = CommonRefactoringUtil.capitalize(message); conflicts.putValue(method, message); } } else if (member instanceof PsiField) { PsiField field = (PsiField)member; if (hasField(targetClass, field)) { String message = RefactoringBundle.message("0.already.exists.in.the.target.class", RefactoringUIUtil.getDescription(field, false)); message = CommonRefactoringUtil.capitalize(message); conflicts.putValue(field, message); } } } RefactoringConflictsUtil.analyzeAccessibilityConflicts(membersToMove, targetClass, conflicts, newVisibility); } private static boolean hasMethod(PsiClass targetClass, PsiMethod method) { PsiMethod[] targetClassMethods = targetClass.getMethods(); for (PsiMethod method1 : targetClassMethods) { if (MethodSignatureUtil.areSignaturesEqual(method.getSignature(PsiSubstitutor.EMPTY), method1.getSignature(PsiSubstitutor.EMPTY))) { return true; } } return false; } private static boolean hasField(PsiClass targetClass, PsiField field) { String fieldName = field.getName(); PsiField[] targetClassFields = targetClass.getFields(); for (PsiField targetClassField : targetClassFields) { if (fieldName.equals(targetClassField.getName())) { return true; } } return false; } public List<PsiElement> getMembers() { return new ArrayList<PsiElement>(myMembersToMove); } public PsiClass getTargetClass() { return myTargetClass; } public static class MoveMembersUsageInfo extends MoveRenameUsageInfo { public final PsiClass qualifierClass; public final PsiElement reference; public final PsiMember member; public MoveMembersUsageInfo(PsiMember member, PsiElement element, PsiClass qualifierClass, PsiElement highlightElement, final PsiReference ref) { super(highlightElement, ref, member); this.member = member; this.qualifierClass = qualifierClass; reference = element; } } }
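/*
 * A usage sketch (hypothetical, not part of the IntelliJ sources): one way a caller
 * could drive MoveMembersProcessor programmatically. It uses only the
 * MoveMembersOptions accessors the processor itself reads above (getSelectedMembers,
 * getTargetClassName, getMemberVisibility, makeEnumConstant) plus run() inherited from
 * BaseRefactoringProcessor; whether MoveMembersOptions declares further methods is not
 * shown here, so this anonymous implementation is illustrative only.
 */
package com.intellij.refactoring.move.moveMembers;

import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiMember;
import com.intellij.psi.PsiModifier;

public final class MoveMembersSketch {
  private MoveMembersSketch() {
  }

  /** Moves the given members into the named target class, making them public. */
  public static void movePublic(final Project project, final PsiMember[] members, final String targetClassName) {
    final MoveMembersOptions options = new MoveMembersOptions() {
      public PsiMember[] getSelectedMembers() { return members; }
      public String getTargetClassName() { return targetClassName; }
      public String getMemberVisibility() { return PsiModifier.PUBLIC; }
      public boolean makeEnumConstant() { return false; }
    };
    new MoveMembersProcessor(project, options).run();
  }
}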
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2009-2011 Google, All Rights reserved // Copyright 2011-2017 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.buildserver; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.List; import java.util.Properties; import java.util.logging.Logger; /** * This class gives access to Young Android project files. * * <p>A Young Android project file is essentially a Java properties file. * * @author [email protected] (Mark Friedman) */ public final class Project { /** * Representation of a source file containing its name and file location. */ public static class SourceDescriptor { // Qualified name of the class defined by the source file private final String qualifiedName; // File descriptor for the source private final File file; private SourceDescriptor(String qualifiedName, File file) { this.qualifiedName = qualifiedName; this.file = file; } /** * Returns the qualified name of the class defined by the source file. * * @return class name of source file */ public String getQualifiedName() { return qualifiedName; } /** * Returns a file descriptor for the source file * * @return file descriptor */ public File getFile() { return file; } } /* * Property tags defined in the project file: * * main - qualified name of main form class * name - application name * icon - application icon * versioncode - version code * versionname - version name * source - comma separated list of source root directories * assets - assets directory (for image and data files bundled with the application) * build - output directory for the compiler * useslocation - flag indicating whether or not the project uses locations * aname - the human-readable application name * androidminsdk - the minimum Android sdk required for the app * theme - the base theme for the app * color.primary - the primary color for the theme * color.primary.dark - the dark color for the theme (not yet applicable) * color.accent - the accent color used in the app theme */ private static final String MAINTAG = "main"; private static final String NAMETAG = "name"; private static final String ICONTAG = "icon"; private static final String SOURCETAG = "source"; private static final String VCODETAG = "versioncode"; private static final String VNAMETAG = "versionname"; private static final String ASSETSTAG = "assets"; private static final String BUILDTAG = "build"; private static final String USESLOCATIONTAG = "useslocation"; private static final String ANAMETAG = "aname"; private static final String ANDROID_MIN_SDK_TAG = "androidminsdk"; private static final String ACTIONBAR_TAG = "actionbar"; private static final String COLOR_THEMETAG = "theme"; private static final String COLOR_PRIMARYTAG = "color.primary"; private static final String COLOR_PRIMARY_DARKTAG = "color.primary.dark"; private static final String COLOR_ACCENTTAG = "color.accent"; // Table containing project properties private Properties properties; // Project directory. This directory contains the project.properties file. private String projectDir; // Build output directory override, or null. 
private String buildDirOverride; // List of source files private List<SourceDescriptor> sources; // Logging support private static final Logger LOG = Logger.getLogger(Project.class.getName()); /** * Creates a new Young Android project descriptor. * * @param projectFile path to project file */ public Project(String projectFile) { this(new File(projectFile)); } /** * Creates a new Young Android project descriptor. * * @param projectFile path to project file * @param buildDirOverride build output directory override, or null */ public Project(String projectFile, String buildDirOverride) { this(new File(projectFile)); this.buildDirOverride = buildDirOverride; } /** * Creates a new Young Android project descriptor. * * @param file project file */ public Project(File file) { try { File parentFile = Preconditions.checkNotNull(file.getParentFile()); projectDir = parentFile.getAbsolutePath(); // Load project file properties = new Properties(); FileInputStream in = new FileInputStream(file); try { properties.load(in); } finally { in.close(); } } catch (IOException e) { e.printStackTrace(); } } /** * Returns the name of the main form class * * @return main form class name */ public String getMainClass() { return properties.getProperty(MAINTAG); } /** * Sets the name of the main form class. * * @param main main form class name */ public void setMainClass(String main) { properties.setProperty(MAINTAG, main); } /** * Returns the name of the project (application). * * @return project name */ public String getProjectName() { return properties.getProperty(NAMETAG); } /** * Sets the name of the project (application) * * @param name project name */ public void setProjectName(String name) { properties.setProperty(NAMETAG, name); } /** * Returns the name of the icon * * @return icon name */ public String getIcon() { return properties.getProperty(ICONTAG); } /** * Sets the name of the icon * * @param icon icon name */ public void setIcon(String icon) { properties.setProperty(ICONTAG, icon); } /** * Returns the version code. * * @return version code */ public String getVCode() { return properties.getProperty(VCODETAG); } /** * Sets the version code. * * @param vcode version code */ public void setVCode(String vcode) { properties.setProperty(VCODETAG, vcode); } /** * Returns the version name. * * @return version name */ public String getVName() { return properties.getProperty(VNAMETAG); } /** * Sets the version name. * * @param vname version name */ public void setVName(String vname) { properties.setProperty(VNAMETAG, vname); } /** * gets the useslocation property * * @return useslocation property */ public String getUsesLocation() { String retval = properties.getProperty(USESLOCATIONTAG); if (retval == null) // Older Projects won't have this retval = "False"; return retval; } /** * Returns the app name. * * @return app name */ public String getAName() { //The non-English character set can't be shown properly and need special encoding. String appName = properties.getProperty(ANAMETAG); try { appName = new String(appName.getBytes("ISO-8859-1"), "UTF-8"); } catch (UnsupportedEncodingException e) { } catch (NullPointerException e) { } return appName; } /** * Sets the app name. * * @param aname app name */ public void setAName(String aname) { properties.setProperty(ANAMETAG, aname); } /** * Returns the minimum SDK desired for the app. 
* * @return the minimum Android sdk */ public String getMinSdk() { return properties.getProperty(ANDROID_MIN_SDK_TAG, "7"); } /** * Returns whether the ActionBar should be enabled in the project. * * @return "true" if the ActionBar should be included in the project. */ public String getActionBar() { return properties.getProperty(ACTIONBAR_TAG, "false"); } /** * Returns the primary color provided by the user. * * @return primary color, or null if the default is requested */ public String getPrimaryColor() { return properties.getProperty(COLOR_PRIMARYTAG); } /** * Returns the dark primary color provided by the user. * * @return dark primary color, or null if the default is requested */ public String getPrimaryColorDark() { return properties.getProperty(COLOR_PRIMARY_DARKTAG); } /** * Returns the accent color provided by the user. * * @return accent color, or null if the default is requested */ public String getAccentColor() { return properties.getProperty(COLOR_ACCENTTAG); } /** * Returns the theme for the project set by the user. * * @return theme, or null if the default is requested */ public String getTheme() { return properties.getProperty(COLOR_THEMETAG); } /** * Returns the project directory. This directory contains the project.properties file. * * @return project directory */ public String getProjectDir() { return projectDir; } /** * Returns the location of the assets directory. * * @return assets directory */ public File getAssetsDirectory() { return new File(projectDir, properties.getProperty(ASSETSTAG)); } /** * Returns the location of the build output directory. * * @return build output directory */ public File getBuildDirectory() { if (buildDirOverride != null) { return new File(buildDirOverride); } return new File(projectDir, properties.getProperty(BUILDTAG)); } /* * Recursively visits source directories and adds found Young Android source files to the list of * source files. */ private void visitSourceDirectories(String root, File file) { if (file.isDirectory()) { // Recursively visit nested directories. for (String child : file.list()) { visitSourceDirectories(root, new File(file, child)); } } else { // Add Young Android source files to the source file list if (file.getName().endsWith(YoungAndroidConstants.YAIL_EXTENSION)) { String absName = file.getAbsolutePath(); String name = absName.substring(root.length() + 1, absName.length() - YoungAndroidConstants.YAIL_EXTENSION.length()); sources.add(new SourceDescriptor(name.replace(File.separatorChar, '.'), file)); } } } /** * Returns a list of Yail files in the project. * * @return list of source files */ public List<SourceDescriptor> getSources() { // Lazily discover source files if (sources == null) { sources = Lists.newArrayList(); String sourceTag = properties.getProperty(SOURCETAG); for (String sourceDir : sourceTag.split(",")) { File dir = new File(projectDir + File.separatorChar + sourceDir); visitSourceDirectories(dir.getAbsolutePath(), dir); } } return sources; } }
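/*
 * A usage sketch (hypothetical, not part of the buildserver sources): loading a Young
 * Android project file with the Project class above and reading a few properties. The
 * file path and sample property values are placeholders; the accessors and the
 * SourceDescriptor iteration are exactly those defined in Project.
 */
package com.google.appinventor.buildserver;

public final class ProjectUsageSketch {
  private ProjectUsageSketch() {
  }

  public static void main(String[] args) {
    // project.properties is a plain Java properties file, e.g.:
    //   main=appinventor.ai_user.HelloPurr.Screen1
    //   name=HelloPurr
    //   source=../src
    //   assets=../assets
    //   build=../build
    Project project = new Project("/path/to/youngandroidproject/project.properties");
    System.out.println("name: " + project.getProjectName());
    System.out.println("main: " + project.getMainClass());
    System.out.println("build dir: " + project.getBuildDirectory());
    for (Project.SourceDescriptor source : project.getSources()) {
      System.out.println(source.getQualifiedName() + " -> " + source.getFile());
    }
  }
}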
/* * Copyright (c) 2005-2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.mediation.statistics; import org.apache.log4j.Logger; import org.apache.synapse.aspects.ComponentType; import org.apache.synapse.aspects.statistics.StatisticsCollector; import org.apache.synapse.aspects.statistics.StatisticsRecord; import org.apache.synapse.aspects.statistics.StatisticsLog; import org.apache.synapse.aspects.statistics.view.InOutStatisticsView; import org.apache.synapse.aspects.statistics.view.StatisticsViewStrategy; import org.apache.synapse.aspects.statistics.view.SystemViewStrategy; import org.wso2.carbon.base.ServerConfiguration; import org.wso2.carbon.mediation.initializer.services.SynapseEnvironmentService; import java.util.*; /** * A thread that collects and processes statistics collected in-memory by * Synapse. Ideally this should be the only consumer to directly access Synapse * statistics. Any other potential consumers should use the MediationStatisticsObserver * API to obtain statistics from the MediationStatisticsStore. */ public class StatisticsReporterThread extends Thread { private static Logger log = Logger.getLogger(StatisticsReporterThread.class); private boolean shutdownRequested = false; private boolean tracingEnabled = false; private MediationStatisticsStore mediationStatisticsStore; /** The reference to the synapse environment service */ private SynapseEnvironmentService synapseEnvironmentService; private long delay = 5 * 1000; private final StatisticsViewStrategy systemViewStrategy = new SystemViewStrategy(); /** * This flag is updated from the value defined in carbon.xml; if it is set to * 'true', the statistics collector will be disabled.
*/ private boolean statisticsReporterDisabled = false; public StatisticsReporterThread(SynapseEnvironmentService synEnvSvc, MediationStatisticsStore mediationStatStore) { this.synapseEnvironmentService = synEnvSvc; this.mediationStatisticsStore = mediationStatStore; } public void setDelay(long delay) { if (log.isDebugEnabled()) { log.debug("Mediation statistics reporter delay set to " + delay + " ms"); } this.delay = delay; } public void setTracingEnabled(boolean tracingEnabled) { this.tracingEnabled = tracingEnabled; } private void reportStatistics(Map<String, Map<String, InOutStatisticsView>> statsMap) { for (Map<String, InOutStatisticsView> viewMap : statsMap.values()) { for (InOutStatisticsView view : viewMap.values()) { if (view != null) { mediationStatisticsStore.updateStatistics(view); } } } } private void delay() { if (delay <= 0) { return; } try { sleep(delay); } catch (InterruptedException ignore) { } } public void run() { while (!shutdownRequested) { try { collectDataAndReport(); } catch (Throwable t) { // catch all possible errors to prevent the thread from dying log.error("Error while collecting and reporting mediation statistics", t); } } } private void collectDataAndReport() { if (log.isDebugEnabled()) { log.debug("Starting new mediation statistics collection cycle"); } StatisticsCollector statisticsCollector = synapseEnvironmentService.getSynapseEnvironment().getStatisticsCollector(); if (statisticsCollector == null) { if (log.isDebugEnabled()) { log.debug("Statistics collector is not available in the Synapse environment"); } delay(); return; } List<StatisticsRecord> records = statisticsCollector.getAndClearStatisticsRecords(); if (records == null || records.size() == 0) { // If no records are collected take a nap and try again later delay(); return; } if (tracingEnabled) { List<MessageTraceLog> traceLogs = getTraceLogs(records); if (traceLogs != null) { mediationStatisticsStore.notifyTraceLogs(traceLogs.toArray( new MessageTraceLog[traceLogs.size()])); } } // report sequence statistics reportStatistics(systemViewStrategy.determineView(records, ComponentType.SEQUENCE)); // report endpoint statistics to database reportStatistics(systemViewStrategy.determineView(records, ComponentType.ENDPOINT)); // report proxy service statistics to database reportStatistics(systemViewStrategy.determineView(records, ComponentType.PROXYSERVICE)); } private List<MessageTraceLog> getTraceLogs(List<StatisticsRecord> records) { List<MessageTraceLog> traceLogs = new ArrayList<MessageTraceLog>(); for (StatisticsRecord record : records) { if (record == null) { continue; } MessageTraceLog traceLog = new MessageTraceLog(record.getId()); List<StatisticsLog> logs = record.getAllStatisticsLogs(); StatisticsLog startLog = null; StatisticsLog endLog = null; for (StatisticsLog log : logs) { if (log == null) { continue; } if (startLog == null && log.getComponentType() != ComponentType.ANY) { startLog = log; } else if (startLog != null) { endLog = log; break; } } if (startLog == null || endLog == null) { continue; } traceLog.setType(startLog.getComponentType()); traceLog.setResourceId(startLog.getId()); switch (startLog.getComponentType()) { case PROXYSERVICE: traceLog.setRequestFaultStatus(startLog.isFault() ? MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); if (!endLog.isEndAnyLog()) { StatisticsLog lastLog = logs.get(logs.size() - 1); traceLog.setResponseFaultStatus(lastLog.isFault() ?
MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); } break; case SEQUENCE: if (endLog.isResponse()) { traceLog.setResponseFaultStatus(endLog.isFault() ? MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); if (!startLog.isResponse()) { traceLog.setRequestFaultStatus(startLog.isFault() ? MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); } } else { traceLog.setRequestFaultStatus(endLog.isFault() ? MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); } break; case ENDPOINT: traceLog.setRequestFaultStatus(endLog.isFault() ? MessageTraceLog.FAULT_STATUS_TRUE : MessageTraceLog.FAULT_STATUS_FALSE); break; } traceLogs.add(traceLog); } if (traceLogs.size() > 0) { return traceLogs; } return null; } public void shutdown() { if (log.isDebugEnabled()) { log.debug("Statistics reporter thread is being stopped"); } shutdownRequested = true; } }
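/*
 * A wiring sketch (hypothetical, not part of the Carbon sources): how an owning
 * component might start and stop the reporter thread above. Only the constructor,
 * setDelay, setTracingEnabled and shutdown defined in StatisticsReporterThread are
 * used, plus setName() and start() from java.lang.Thread; the two collaborators are
 * assumed to be supplied by the caller.
 */
package org.wso2.carbon.mediation.statistics;

import org.wso2.carbon.mediation.initializer.services.SynapseEnvironmentService;

public final class StatisticsReporterBootstrapSketch {

    private StatisticsReporterBootstrapSketch() {
    }

    public static StatisticsReporterThread startReporter(SynapseEnvironmentService synEnvSvc,
                                                         MediationStatisticsStore statStore) {
        StatisticsReporterThread reporter = new StatisticsReporterThread(synEnvSvc, statStore);
        reporter.setName("mediation-statistics-reporter");
        reporter.setDelay(5000);           // poll every five seconds when idle
        reporter.setTracingEnabled(false); // no per-message trace logs
        reporter.start();                  // runs until shutdown() flips the flag
        return reporter;
    }
}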
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests for {@link ExploitAssigns} * * @author [email protected] (Nick Santos) */ @RunWith(JUnit4.class) public final class ExploitAssignsTest extends CompilerTestCase { @Test public void testExprExploitationTypes() { test("a = true; b = true", "b = a = true"); test("a = !0; b = !0", "b = a = !0"); test("a = !1; b = !1", "b = a = !1"); test("a = void 0; b = void 0", "b = a = void 0"); test("a = -Infinity; b = -Infinity", "b = a = -Infinity"); } @Test public void testExprExploitationTypes2() { test("a = !0; b = !0", "b = a = !0"); } @Test public void testExprExploitation() { test("a = null; b = null; var c = b", "var c = b = a = null"); test("a = null; b = null", "b = a = null"); test("a = undefined; b = undefined", "b = a = undefined"); test("a = 0; b = 0", "b=a=0"); test("a = 'foo'; b = 'foo'", "b = a = \"foo\""); test("a = c; b = c", "b=a=c"); testSame("a = 0; b = 1"); testSame("a = \"foo\"; b = \"foox\""); test("a = null; a && b;", "(a = null)&&b"); test("a = null; a || b;", "(a = null)||b"); test("a = null; a ? b : c;", "(a = null) ? b : c"); test("a = null; this.foo = null;", "this.foo = a = null"); test("function f(){ a = null; return null; }", "function f(){return a = null}"); test("a = true; if (a) { foo(); }", "if (a = true) { foo() }"); test("a = true; if (a && a) { foo(); }", "if ((a = true) && a) { foo() }"); test("a = false; if (a) { foo(); }", "if (a = false) { foo() }"); test("a = !0; if (a) { foo(); }", "if (a = !0) { foo() }"); test("a = !0; if (a && a) { foo(); }", "if ((a = !0) && a) { foo() }"); test("a = !1; if (a) { foo(); }", "if (a = !1) { foo() }"); testSame("a = this.foo; a();"); test("a = b; b = a;", "b = a = b"); testSame("a = b; a.c = a"); test("this.foo = null; this.bar = null;", "this.bar = this.foo = null"); test("this.foo = null; this.bar = null; this.baz = this.bar", "this.baz = this.bar = this.foo = null"); test("this.foo = null; a = null;", "a = this.foo = null"); test("this.foo = null; a = this.foo;", "a = this.foo = null"); test("a.b.c=null; a=null;", "a = a.b.c = null"); testSame("a = null; a.b.c = null"); test("(a=b).c = null; this.b = null;", "this.b = (a=b).c = null"); testSame("if(x) a = null; else b = a"); } @Test public void testLetConstAssignment() { test( "a = null; b = null; let c = b", "let c = b = a = null"); } @Test public void testBlockScope() { test("{ a = null; b = null; c = b }", "{ c = b = a = null }"); // TODO (simranarora) What should we have as the intended behavior with block scoping? 
test( "a = null; b = null; { c = b; }", // "{ c = b = a = null; } "b = a = null; { c = b; }"); } @Test public void testExploitInArrowFunction() { test("() => { a = null; return null; }", "() => { return a = null }"); } @Test public void testNestedExprExploitation() { test("this.foo = null; this.bar = null; this.baz = null;", "this.baz = this.bar = this.foo = null"); test("a = 3; this.foo = a; this.bar = a; this.baz = 3;", "this.baz = this.bar = this.foo = a = 3"); test("a = 3; this.foo = a; this.bar = this.foo; this.baz = a;", "this.baz = this.bar = this.foo = a = 3"); test("a = 3; this.foo = a; this.bar = 3; this.baz = this.foo;", "this.baz = this.bar = this.foo = a = 3"); test("a = 3; this.foo = a; a = 3; this.bar = 3; " + "a = 3; this.baz = this.foo;", "this.baz = a = this.bar = a = this.foo = a = 3"); test("a = 4; this.foo = a; a = 3; this.bar = 3; " + "a = 3; this.baz = this.foo;", "this.foo = a = 4; a = this.bar = a = 3; this.baz = this.foo"); test("a = 3; this.foo = a; a = 4; this.bar = 3; " + "a = 3; this.baz = this.foo;", "this.foo = a = 3; a = 4; a = this.bar = 3; this.baz = this.foo"); test("a = 3; this.foo = a; a = 3; this.bar = 3; " + "a = 4; this.baz = this.foo;", "this.bar = a = this.foo = a = 3; a = 4; this.baz = this.foo"); } @Test public void testBug1840071() { // Some external properties are implemented as setters. Let's // make sure that we don't collapse them inappropriately. test("a.b = a.x; if (a.x) {}", "if (a.b = a.x) {}"); testSame("a.b = a.x; if (a.b) {}"); test("a.b = a.c = a.x; if (a.x) {}", "if (a.b = a.c = a.x) {}"); testSame("a.b = a.c = a.x; if (a.c) {}"); testSame("a.b = a.c = a.x; if (a.b) {}"); } @Test public void testBug2072343() { testSame("a = a.x;a = a.x"); testSame("a = a.x;b = a.x"); test("b = a.x;a = a.x", "a = b = a.x"); testSame("a.x = a;a = a.x"); testSame("a.b = a.b.x;a.b = a.b.x"); testSame("a.y = a.y.x;b = a.y;c = a.y.x"); test("a = a.x;b = a;c = a.x", "b = a = a.x;c = a.x"); test("b = a.x;a = b;c = a.x", "a = b = a.x;c = a.x"); } @Test public void testBadCollapseIntoCall() { // Can't collapse this, because if we did, 'foo' would be called // in the wrong 'this' context. testSame("this.foo = function() {}; this.foo();"); } @Test public void testBadCollapse() { testSame("this.$e$ = []; this.$b$ = null;"); } @Test public void testIssue1017() { testSame("x = x.parentNode.parentNode; x = x.parentNode.parentNode;"); } @Override protected CompilerPass getProcessor(Compiler compiler) { return new PeepholeOptimizationsPass(compiler, getName(), new ExploitAssigns()); } }
/* * Copyright 2002-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.core.annotation; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.Set; import java.util.stream.Collectors; import org.springframework.core.BridgeMethodResolver; import org.springframework.core.annotation.MergedAnnotation.Adapt; import org.springframework.core.annotation.MergedAnnotations.SearchStrategy; import org.springframework.lang.Nullable; import org.springframework.util.MultiValueMap; /** * General utility methods for finding annotations, meta-annotations, and * repeatable annotations on {@link AnnotatedElement AnnotatedElements}. * * <p> * {@code AnnotatedElementUtils} defines the public API for Spring's * meta-annotation programming model with support for <em>annotation attribute * overrides</em>. If you do not need support for annotation attribute * overrides, consider using {@link AnnotationUtils} instead. * * <p> * Note that the features of this class are not provided by the JDK's * introspection facilities themselves. * * <h3>Annotation Attribute Overrides</h3> * <p> * Support for meta-annotations with <em>attribute overrides</em> in * <em>composed annotations</em> is provided by all variants of the * {@code getMergedAnnotationAttributes()}, {@code getMergedAnnotation()}, * {@code getAllMergedAnnotations()}, {@code getMergedRepeatableAnnotations()}, * {@code findMergedAnnotationAttributes()}, {@code findMergedAnnotation()}, * {@code findAllMergedAnnotations()}, and * {@code findMergedRepeatableAnnotations()} methods. * * <h3>Find vs. Get Semantics</h3> * <p> * The search algorithms used by methods in this class follow either * <em>find</em> or <em>get</em> semantics. Consult the javadocs for each * individual method for details on which search algorithm is used. * * <p> * <strong>Get semantics</strong> are limited to searching for annotations that * are either <em>present</em> on an {@code AnnotatedElement} (i.e. declared * locally or {@linkplain java.lang.annotation.Inherited inherited}) or declared * within the annotation hierarchy <em>above</em> the {@code AnnotatedElement}. 
* * <p> * <strong>Find semantics</strong> are much more exhaustive, providing <em>get * semantics</em> plus support for the following: * * <ul> * <li>Searching on interfaces, if the annotated element is a class * <li>Searching on superclasses, if the annotated element is a class * <li>Resolving bridged methods, if the annotated element is a method * <li>Searching on methods in interfaces, if the annotated element is a method * <li>Searching on methods in superclasses, if the annotated element is a * method * </ul> * * <h3>Support for {@code @Inherited}</h3> * <p> * Methods following <em>get semantics</em> will honor the contract of Java's * {@link java.lang.annotation.Inherited @Inherited} annotation except that * locally declared annotations (including custom composed annotations) will be * favored over inherited annotations. In contrast, methods following <em>find * semantics</em> will completely ignore the presence of {@code @Inherited} * since the <em>find</em> search algorithm manually traverses type and method * hierarchies and thereby implicitly supports annotation inheritance without a * need for {@code @Inherited}. * * @author Phillip Webb * @author Juergen Hoeller * @author Sam Brannen * @since 4.0 * @see AliasFor * @see AnnotationAttributes * @see AnnotationUtils * @see BridgeMethodResolver */ public abstract class AnnotatedElementUtils { /** * Build an adapted {@link AnnotatedElement} for the given annotations, * typically for use with other methods on {@link AnnotatedElementUtils}. * * @param annotations the annotations to expose through the * {@code AnnotatedElement} * @since 4.3 */ public static AnnotatedElement forAnnotations(Annotation... annotations) { return new AnnotatedElementForAnnotations(annotations); } /** * Get the fully qualified class names of all meta-annotation types * <em>present</em> on the annotation (of the specified {@code annotationType}) * on the supplied {@link AnnotatedElement}. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationType the annotation type on which to find meta-annotations * @return the names of all meta-annotations present on the annotation, or an * empty set if none found * @since 4.2 * @see #getMetaAnnotationTypes(AnnotatedElement, String) * @see #hasMetaAnnotationTypes */ public static Set<String> getMetaAnnotationTypes(AnnotatedElement element, Class<? extends Annotation> annotationType) { return getMetaAnnotationTypes(element, element.getAnnotation(annotationType)); } /** * Get the fully qualified class names of all meta-annotation types * <em>present</em> on the annotation (of the specified {@code annotationName}) * on the supplied {@link AnnotatedElement}. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}.
* * @param element the annotated element * @param annotationName the fully qualified class name of the annotation type * on which to find meta-annotations * @return the names of all meta-annotations present on the annotation, or an * empty set if none found * @see #getMetaAnnotationTypes(AnnotatedElement, Class) * @see #hasMetaAnnotationTypes */ public static Set<String> getMetaAnnotationTypes(AnnotatedElement element, String annotationName) { for (Annotation annotation : element.getAnnotations()) { if (annotation.annotationType().getName().equals(annotationName)) { return getMetaAnnotationTypes(element, annotation); } } return Collections.emptySet(); } private static Set<String> getMetaAnnotationTypes(AnnotatedElement element, @Nullable Annotation annotation) { if (annotation == null) { return Collections.emptySet(); } return getAnnotations(annotation.annotationType()).stream() .map(mergedAnnotation -> mergedAnnotation.getType().getName()) .collect(Collectors.toCollection(LinkedHashSet::new)); } /** * Determine if the supplied {@link AnnotatedElement} is annotated with a * <em>composed annotation</em> that is meta-annotated with an annotation of the * specified {@code annotationType}. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationType the meta-annotation type to find * @return {@code true} if a matching meta-annotation is present * @since 4.2.3 * @see #getMetaAnnotationTypes */ public static boolean hasMetaAnnotationTypes(AnnotatedElement element, Class<? extends Annotation> annotationType) { return getAnnotations(element).stream(annotationType).anyMatch(MergedAnnotation::isMetaPresent); } /** * Determine if the supplied {@link AnnotatedElement} is annotated with a * <em>composed annotation</em> that is meta-annotated with an annotation of the * specified {@code annotationName}. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the meta-annotation * type to find * @return {@code true} if a matching meta-annotation is present * @see #getMetaAnnotationTypes */ public static boolean hasMetaAnnotationTypes(AnnotatedElement element, String annotationName) { return getAnnotations(element).stream(annotationName).anyMatch(MergedAnnotation::isMetaPresent); } /** * Determine if an annotation of the specified {@code annotationType} is * <em>present</em> on the supplied {@link AnnotatedElement} or within the * annotation hierarchy <em>above</em> the specified element. * <p> * If this method returns {@code true}, then * {@link #getMergedAnnotationAttributes} will return a non-null value. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationType the annotation type to find * @return {@code true} if a matching annotation is present * @since 4.2.3 * @see #hasAnnotation(AnnotatedElement, Class) */ public static boolean isAnnotated(AnnotatedElement element, Class<? extends Annotation> annotationType) { // Shortcut: directly present on the element, with no merging needed?
if (AnnotationFilter.PLAIN.matches(annotationType) || AnnotationsScanner.hasPlainJavaAnnotationsOnly(element)) { return element.isAnnotationPresent(annotationType); } // Exhaustive retrieval of merged annotations... return getAnnotations(element).isPresent(annotationType); } /** * Determine if an annotation of the specified {@code annotationName} is * <em>present</em> on the supplied {@link AnnotatedElement} or within the * annotation hierarchy <em>above</em> the specified element. * <p> * If this method returns {@code true}, then * {@link #getMergedAnnotationAttributes} will return a non-null value. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the annotation type * to find * @return {@code true} if a matching annotation is present */ public static boolean isAnnotated(AnnotatedElement element, String annotationName) { return getAnnotations(element).isPresent(annotationName); } /** * Get the first annotation of the specified {@code annotationType} within the * annotation hierarchy <em>above</em> the supplied {@code element} and merge * that annotation's attributes with <em>matching</em> attributes from * annotations in lower levels of the annotation hierarchy. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within the annotation hierarchy. * <p> * This method delegates to * {@link #getMergedAnnotationAttributes(AnnotatedElement, String)}. * * @param element the annotated element * @param annotationType the annotation type to find * @return the merged {@code AnnotationAttributes}, or {@code null} if not found * @since 4.2 * @see #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #findMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #getMergedAnnotation(AnnotatedElement, Class) * @see #findMergedAnnotation(AnnotatedElement, Class) */ @Nullable public static AnnotationAttributes getMergedAnnotationAttributes(AnnotatedElement element, Class<? extends Annotation> annotationType) { MergedAnnotation<?> mergedAnnotation = getAnnotations(element).get(annotationType, null, MergedAnnotationSelectors.firstDirectlyDeclared()); return getAnnotationAttributes(mergedAnnotation, false, false); } /** * Get the first annotation of the specified {@code annotationName} within the * annotation hierarchy <em>above</em> the supplied {@code element} and merge * that annotation's attributes with <em>matching</em> attributes from * annotations in lower levels of the annotation hierarchy. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within the annotation hierarchy. * <p> * This method delegates to * {@link #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, boolean)}, * supplying {@code false} for {@code classValuesAsString} and * {@code nestedAnnotationsAsMap}.
* * @param element the annotated element * @param annotationName the fully qualified class name of the annotation type * to find * @return the merged {@code AnnotationAttributes}, or {@code null} if not found * @since 4.2 * @see #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #findMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #getAllAnnotationAttributes(AnnotatedElement, String) */ @Nullable public static AnnotationAttributes getMergedAnnotationAttributes(AnnotatedElement element, String annotationName) { return getMergedAnnotationAttributes(element, annotationName, false, false); } /** * Get the first annotation of the specified {@code annotationName} within the * annotation hierarchy <em>above</em> the supplied {@code element} and merge * that annotation's attributes with <em>matching</em> attributes from * annotations in lower levels of the annotation hierarchy. * <p> * Attributes from lower levels in the annotation hierarchy override attributes * of the same name from higher levels, and {@link AliasFor @AliasFor} semantics * are fully supported, both within a single annotation and within the * annotation hierarchy. * <p> * In contrast to {@link #getAllAnnotationAttributes}, the search algorithm used * by this method will stop searching the annotation hierarchy once the first * annotation of the specified {@code annotationName} has been found. As a * consequence, additional annotations of the specified {@code annotationName} * will be ignored. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the * annotation type to find * @param classValuesAsString whether to convert Class references into * Strings or to preserve them as Class references * @param nestedAnnotationsAsMap whether to convert nested Annotation instances * into {@code AnnotationAttributes} maps or to * preserve them as Annotation instances * @return the merged {@code AnnotationAttributes}, or {@code null} if not found * @since 4.2 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #findMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #getAllAnnotationAttributes(AnnotatedElement, String, boolean, boolean) */ @Nullable public static AnnotationAttributes getMergedAnnotationAttributes(AnnotatedElement element, String annotationName, boolean classValuesAsString, boolean nestedAnnotationsAsMap) { MergedAnnotation<?> mergedAnnotation = getAnnotations(element).get(annotationName, null, MergedAnnotationSelectors.firstDirectlyDeclared()); return getAnnotationAttributes(mergedAnnotation, classValuesAsString, nestedAnnotationsAsMap); } /** * Get the first annotation of the specified {@code annotationType} within the * annotation hierarchy <em>above</em> the supplied {@code element}, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy, and synthesize the result back into * an annotation of the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within the annotation hierarchy.
* * @param element the annotated element * @param annotationType the annotation type to find * @return the merged, synthesized {@code Annotation}, or {@code null} if not * found * @since 4.2 * @see #findMergedAnnotation(AnnotatedElement, Class) */ @Nullable public static <A extends Annotation> A getMergedAnnotation(AnnotatedElement element, Class<A> annotationType) { // Shortcut: directly present on the element, with no merging needed? if (AnnotationFilter.PLAIN.matches(annotationType) || AnnotationsScanner.hasPlainJavaAnnotationsOnly(element)) { return element.getDeclaredAnnotation(annotationType); } // Exhaustive retrieval of merged annotations... return getAnnotations(element).get(annotationType, null, MergedAnnotationSelectors.firstDirectlyDeclared()) .synthesize(MergedAnnotation::isPresent).orElse(null); } /** * Get <strong>all</strong> annotations of the specified {@code annotationType} * within the annotation hierarchy <em>above</em> the supplied {@code element}; * and for each annotation found, merge that annotation's attributes with * <em>matching</em> attributes from annotations in lower levels of the * annotation hierarchy and synthesize the results back into an annotation of * the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @return the set of all merged, synthesized {@code Annotations} found, or an * empty set if none were found * @since 4.3 * @see #getMergedAnnotation(AnnotatedElement, Class) * @see #getAllAnnotationAttributes(AnnotatedElement, String) * @see #findAllMergedAnnotations(AnnotatedElement, Class) */ public static <A extends Annotation> Set<A> getAllMergedAnnotations(AnnotatedElement element, Class<A> annotationType) { return getAnnotations(element).stream(annotationType).collect(MergedAnnotationCollectors.toAnnotationSet()); } /** * Get <strong>all</strong> annotations of the specified {@code annotationTypes} * within the annotation hierarchy <em>above</em> the supplied {@code element}; * and for each annotation found, merge that annotation's attributes with * <em>matching</em> attributes from annotations in lower levels of the * annotation hierarchy and synthesize the results back into an annotation of * the corresponding {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationTypes the annotation types to find * @return the set of all merged, synthesized {@code Annotations} found, or an * empty set if none were found * @since 5.1 * @see #getAllMergedAnnotations(AnnotatedElement, Class) */ public static Set<Annotation> getAllMergedAnnotations(AnnotatedElement element, Set<Class<?
extends Annotation>> annotationTypes) { return getAnnotations(element).stream().filter(MergedAnnotationPredicates.typeIn(annotationTypes)) .collect(MergedAnnotationCollectors.toAnnotationSet()); } /** * Get all <em>repeatable annotations</em> of the specified * {@code annotationType} within the annotation hierarchy <em>above</em> the * supplied {@code element}; and for each annotation found, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy and synthesize the results back into * an annotation of the specified {@code annotationType}. * <p> * The container type that holds the repeatable annotations will be looked up * via {@link java.lang.annotation.Repeatable}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @return the set of all merged repeatable {@code Annotations} found, or an * empty set if none were found * @throws IllegalArgumentException if the {@code element} or * {@code annotationType} is {@code null}, or * if the container type cannot be resolved * @since 4.3 * @see #getMergedAnnotation(AnnotatedElement, Class) * @see #getAllMergedAnnotations(AnnotatedElement, Class) * @see #getMergedRepeatableAnnotations(AnnotatedElement, Class, Class) */ public static <A extends Annotation> Set<A> getMergedRepeatableAnnotations(AnnotatedElement element, Class<A> annotationType) { return getMergedRepeatableAnnotations(element, annotationType, null); } /** * Get all <em>repeatable annotations</em> of the specified * {@code annotationType} within the annotation hierarchy <em>above</em> the * supplied {@code element}; and for each annotation found, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy and synthesize the results back into * an annotation of the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @param containerType the type of the container that holds the annotations; * may be {@code null} if the container type should be * looked up via {@link java.lang.annotation.Repeatable} * @return the set of all merged repeatable {@code Annotations} found, or an * empty set if none were found * @throws IllegalArgumentException if the {@code element} or * {@code annotationType} is * {@code null}, or if the container * type cannot be resolved * @throws AnnotationConfigurationException if the supplied * {@code containerType} is not a valid * container annotation for the * supplied {@code annotationType} * @since 4.3 * @see #getMergedAnnotation(AnnotatedElement, Class) * @see #getAllMergedAnnotations(AnnotatedElement, Class) */ public static <A extends Annotation> Set<A> getMergedRepeatableAnnotations(AnnotatedElement element, Class<A> annotationType, @Nullable Class<?
extends Annotation> containerType) { return getRepeatableAnnotations(element, containerType, annotationType).stream(annotationType) .collect(MergedAnnotationCollectors.toAnnotationSet()); } /** * Get the annotation attributes of <strong>all</strong> annotations of the * specified {@code annotationName} in the annotation hierarchy above the * supplied {@link AnnotatedElement} and store the results in a * {@link MultiValueMap}. * <p> * Note: in contrast to * {@link #getMergedAnnotationAttributes(AnnotatedElement, String)}, this method * does <em>not</em> support attribute overrides. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the annotation type * to find * @return a {@link MultiValueMap} keyed by attribute name, containing the * annotation attributes from all annotations found, or {@code null} if * not found * @see #getAllAnnotationAttributes(AnnotatedElement, String, boolean, boolean) */ @Nullable public static MultiValueMap<String, Object> getAllAnnotationAttributes(AnnotatedElement element, String annotationName) { return getAllAnnotationAttributes(element, annotationName, false, false); } /** * Get the annotation attributes of <strong>all</strong> annotations of the * specified {@code annotationName} in the annotation hierarchy above the * supplied {@link AnnotatedElement} and store the results in a * {@link MultiValueMap}. * <p> * Note: in contrast to * {@link #getMergedAnnotationAttributes(AnnotatedElement, String)}, this method * does <em>not</em> support attribute overrides. * <p> * This method follows <em>get semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the * annotation type to find * @param classValuesAsString whether to convert Class references into * Strings or to preserve them as Class references * @param nestedAnnotationsAsMap whether to convert nested Annotation instances * into {@code AnnotationAttributes} maps or to * preserve them as Annotation instances * @return a {@link MultiValueMap} keyed by attribute name, containing the * annotation attributes from all annotations found, or {@code null} if * not found */ @Nullable public static MultiValueMap<String, Object> getAllAnnotationAttributes(AnnotatedElement element, String annotationName, final boolean classValuesAsString, final boolean nestedAnnotationsAsMap) { Adapt[] adaptations = Adapt.values(classValuesAsString, nestedAnnotationsAsMap); return getAnnotations(element).stream(annotationName) .filter(MergedAnnotationPredicates.unique(MergedAnnotation::getMetaTypes)) .map(MergedAnnotation::withNonMergedAttributes) .collect(MergedAnnotationCollectors.toMultiValueMap(AnnotatedElementUtils::nullIfEmpty, adaptations)); } /** * Determine if an annotation of the specified {@code annotationType} is * <em>available</em> on the supplied {@link AnnotatedElement} or within the * annotation hierarchy <em>above</em> the specified element. * <p> * If this method returns {@code true}, then * {@link #findMergedAnnotationAttributes} will return a non-null value. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}.
* * @param element the annotated element * @param annotationType the annotation type to find * @return {@code true} if a matching annotation is present * @since 4.3 * @see #isAnnotated(AnnotatedElement, Class) */ public static boolean hasAnnotation(AnnotatedElement element, Class<? extends Annotation> annotationType) {Thread.dumpStack(); // Shortcut: directly present on the element, with no merging needed? if (AnnotationFilter.PLAIN.matches(annotationType) || AnnotationsScanner.hasPlainJavaAnnotationsOnly(element)) { return element.isAnnotationPresent(annotationType); } // Exhaustive retrieval of merged annotations... return findAnnotations(element).isPresent(annotationType); } /** * Find the first annotation of the specified {@code annotationType} within the * annotation hierarchy <em>above</em> the supplied {@code element} and merge * that annotation's attributes with <em>matching</em> attributes from * annotations in lower levels of the annotation hierarchy. * <p> * Attributes from lower levels in the annotation hierarchy override attributes * of the same name from higher levels, and {@link AliasFor @AliasFor} semantics * are fully supported, both within a single annotation and within the * annotation hierarchy. * <p> * In contrast to {@link #getAllAnnotationAttributes}, the search algorithm used * by this method will stop searching the annotation hierarchy once the first * annotation of the specified {@code annotationType} has been found. As a * consequence, additional annotations of the specified {@code annotationType} * will be ignored. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationType the annotation type to find * @param classValuesAsString whether to convert Class references into * Strings or to preserve them as Class references * @param nestedAnnotationsAsMap whether to convert nested Annotation instances * into {@code AnnotationAttributes} maps or to * preserve them as Annotation instances * @return the merged {@code AnnotationAttributes}, or {@code null} if not found * @since 4.2 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) */ @Nullable public static AnnotationAttributes findMergedAnnotationAttributes(AnnotatedElement element, Class<? extends Annotation> annotationType, boolean classValuesAsString, boolean nestedAnnotationsAsMap) {Thread.dumpStack(); MergedAnnotation<?> mergedAnnotation = findAnnotations(element).get(annotationType, null, MergedAnnotationSelectors.firstDirectlyDeclared()); return getAnnotationAttributes(mergedAnnotation, classValuesAsString, nestedAnnotationsAsMap); } /** * Find the first annotation of the specified {@code annotationName} within the * annotation hierarchy <em>above</em> the supplied {@code element} and merge * that annotation's attributes with <em>matching</em> attributes from * annotations in lower levels of the annotation hierarchy. * <p> * Attributes from lower levels in the annotation hierarchy override attributes * of the same name from higher levels, and {@link AliasFor @AliasFor} semantics * are fully supported, both within a single annotation and within the * annotation hierarchy. 
* <p> * In contrast to {@link #getAllAnnotationAttributes}, the search algorithm used * by this method will stop searching the annotation hierarchy once the first * annotation of the specified {@code annotationName} has been found. As a * consequence, additional annotations of the specified {@code annotationName} * will be ignored. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationName the fully qualified class name of the * annotation type to find * @param classValuesAsString whether to convert Class references into * Strings or to preserve them as Class references * @param nestedAnnotationsAsMap whether to convert nested Annotation instances * into {@code AnnotationAttributes} maps or to * preserve them as Annotation instances * @return the merged {@code AnnotationAttributes}, or {@code null} if not found * @since 4.2 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) */ @Nullable public static AnnotationAttributes findMergedAnnotationAttributes(AnnotatedElement element, String annotationName, boolean classValuesAsString, boolean nestedAnnotationsAsMap) {Thread.dumpStack(); MergedAnnotation<?> mergedAnnotation = findAnnotations(element).get(annotationName, null, MergedAnnotationSelectors.firstDirectlyDeclared()); return getAnnotationAttributes(mergedAnnotation, classValuesAsString, nestedAnnotationsAsMap); } /** * Find the first annotation of the specified {@code annotationType} within the * annotation hierarchy <em>above</em> the supplied {@code element}, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy, and synthesize the result back into * an annotation of the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within the annotation hierarchy. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element * @param annotationType the annotation type to find * @return the merged, synthesized {@code Annotation}, or {@code null} if not * found * @since 4.2 * @see #findAllMergedAnnotations(AnnotatedElement, Class) * @see #findMergedAnnotationAttributes(AnnotatedElement, String, boolean, * boolean) * @see #getMergedAnnotationAttributes(AnnotatedElement, Class) */ @Nullable public static <A extends Annotation> A findMergedAnnotation(AnnotatedElement element, Class<A> annotationType) {Thread.dumpStack(); // Shortcut: directly present on the element, with no merging needed? if (AnnotationFilter.PLAIN.matches(annotationType) || AnnotationsScanner.hasPlainJavaAnnotationsOnly(element)) { return element.getDeclaredAnnotation(annotationType); } // Exhaustive retrieval of merged annotations... 
return findAnnotations(element).get(annotationType, null, MergedAnnotationSelectors.firstDirectlyDeclared()) .synthesize(MergedAnnotation::isPresent).orElse(null); } /** * Find <strong>all</strong> annotations of the specified {@code annotationType} * within the annotation hierarchy <em>above</em> the supplied {@code element}; * and for each annotation found, merge that annotation's attributes with * <em>matching</em> attributes from annotations in lower levels of the * annotation hierarchy and synthesize the results back into an annotation of * the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @return the set of all merged, synthesized {@code Annotations} found, or an * empty set if none were found * @since 4.3 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #getAllMergedAnnotations(AnnotatedElement, Class) */ public static <A extends Annotation> Set<A> findAllMergedAnnotations(AnnotatedElement element, Class<A> annotationType) {Thread.dumpStack(); return findAnnotations(element).stream(annotationType).sorted(highAggregateIndexesFirst()) .collect(MergedAnnotationCollectors.toAnnotationSet()); } /** * Find <strong>all</strong> annotations of the specified * {@code annotationTypes} within the annotation hierarchy <em>above</em> the * supplied {@code element}; and for each annotation found, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy and synthesize the results back into * an annotation of the corresponding {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationTypes the annotation types to find * @return the set of all merged, synthesized {@code Annotations} found, or an * empty set if none were found * @since 5.1 * @see #findAllMergedAnnotations(AnnotatedElement, Class) */ public static Set<Annotation> findAllMergedAnnotations(AnnotatedElement element, Set<Class<? extends Annotation>> annotationTypes) {Thread.dumpStack(); return findAnnotations(element).stream().filter(MergedAnnotationPredicates.typeIn(annotationTypes)) .sorted(highAggregateIndexesFirst()).collect(MergedAnnotationCollectors.toAnnotationSet()); } /** * Find all <em>repeatable annotations</em> of the specified * {@code annotationType} within the annotation hierarchy <em>above</em> the * supplied {@code element}; and for each annotation found, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy and synthesize the results back into * an annotation of the specified {@code annotationType}. * <p> * The container type that holds the repeatable annotations will be looked up * via {@link java.lang.annotation.Repeatable}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. 
* <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @return the set of all merged repeatable {@code Annotations} found, or an * empty set if none were found * @throws IllegalArgumentException if the {@code element} or * {@code annotationType} is {@code null}, or * if the container type cannot be resolved * @since 4.3 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #findAllMergedAnnotations(AnnotatedElement, Class) * @see #findMergedRepeatableAnnotations(AnnotatedElement, Class, Class) */ public static <A extends Annotation> Set<A> findMergedRepeatableAnnotations(AnnotatedElement element, Class<A> annotationType) {Thread.dumpStack(); return findMergedRepeatableAnnotations(element, annotationType, null); } /** * Find all <em>repeatable annotations</em> of the specified * {@code annotationType} within the annotation hierarchy <em>above</em> the * supplied {@code element}; and for each annotation found, merge that * annotation's attributes with <em>matching</em> attributes from annotations in * lower levels of the annotation hierarchy and synthesize the results back into * an annotation of the specified {@code annotationType}. * <p> * {@link AliasFor @AliasFor} semantics are fully supported, both within a * single annotation and within annotation hierarchies. * <p> * This method follows <em>find semantics</em> as described in the * {@linkplain AnnotatedElementUtils class-level javadoc}. * * @param element the annotated element (never {@code null}) * @param annotationType the annotation type to find (never {@code null}) * @param containerType the type of the container that holds the annotations; * may be {@code null} if the container type should be * looked up via {@link java.lang.annotation.Repeatable} * @return the set of all merged repeatable {@code Annotations} found, or an * empty set if none were found * @throws IllegalArgumentException if the {@code element} or * {@code annotationType} is * {@code null}, or if the container * type cannot be resolved * @throws AnnotationConfigurationException if the supplied * {@code containerType} is not a valid * container annotation for the * supplied {@code annotationType} * @since 4.3 * @see #findMergedAnnotation(AnnotatedElement, Class) * @see #findAllMergedAnnotations(AnnotatedElement, Class) */ public static <A extends Annotation> Set<A> findMergedRepeatableAnnotations(AnnotatedElement element, Class<A> annotationType, @Nullable Class<? extends Annotation> containerType) {Thread.dumpStack(); return findRepeatableAnnotations(element, containerType, annotationType).stream(annotationType) .sorted(highAggregateIndexesFirst()).collect(MergedAnnotationCollectors.toAnnotationSet()); } private static MergedAnnotations getAnnotations(AnnotatedElement element) {Thread.dumpStack(); return MergedAnnotations.from(element, SearchStrategy.INHERITED_ANNOTATIONS, RepeatableContainers.none(), AnnotationFilter.PLAIN); } private static MergedAnnotations getRepeatableAnnotations(AnnotatedElement element, @Nullable Class<? extends Annotation> containerType, Class<? 
extends Annotation> annotationType) {Thread.dumpStack(); RepeatableContainers repeatableContainers = RepeatableContainers.of(annotationType, containerType); return MergedAnnotations.from(element, SearchStrategy.INHERITED_ANNOTATIONS, repeatableContainers, AnnotationFilter.PLAIN); } private static MergedAnnotations findAnnotations(AnnotatedElement element) {Thread.dumpStack(); return MergedAnnotations.from(element, SearchStrategy.TYPE_HIERARCHY, RepeatableContainers.none(), AnnotationFilter.PLAIN); } private static MergedAnnotations findRepeatableAnnotations(AnnotatedElement element, @Nullable Class<? extends Annotation> containerType, Class<? extends Annotation> annotationType) {Thread.dumpStack(); RepeatableContainers repeatableContainers = RepeatableContainers.of(annotationType, containerType); return MergedAnnotations.from(element, SearchStrategy.TYPE_HIERARCHY, repeatableContainers, AnnotationFilter.PLAIN); } @Nullable private static MultiValueMap<String, Object> nullIfEmpty(MultiValueMap<String, Object> map) {Thread.dumpStack(); return (map.isEmpty() ? null : map); } private static <A extends Annotation> Comparator<MergedAnnotation<A>> highAggregateIndexesFirst() {Thread.dumpStack(); return Comparator.<MergedAnnotation<A>>comparingInt(MergedAnnotation::getAggregateIndex).reversed(); } @Nullable private static AnnotationAttributes getAnnotationAttributes(MergedAnnotation<?> annotation, boolean classValuesAsString, boolean nestedAnnotationsAsMap) {Thread.dumpStack(); if (!annotation.isPresent()) { return null; } return annotation.asAnnotationAttributes(Adapt.values(classValuesAsString, nestedAnnotationsAsMap)); } /** * Adapted {@link AnnotatedElement} that hold specific annotations. */ private static class AnnotatedElementForAnnotations implements AnnotatedElement { private final Annotation[] annotations; AnnotatedElementForAnnotations(Annotation... annotations) { this.annotations = annotations; } @Override @SuppressWarnings("unchecked") @Nullable public <T extends Annotation> T getAnnotation(Class<T> annotationClass) { for (Annotation annotation : this.annotations) { if (annotation.annotationType() == annotationClass) { return (T) annotation; } } return null; } @Override public Annotation[] getAnnotations() { return this.annotations.clone(); } @Override public Annotation[] getDeclaredAnnotations() { return this.annotations.clone(); } } }
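// Illustrative usage sketch (not part of the Spring sources): how the merged
// lookup above behaves with a composed stereotype annotation. Assumes
// spring-core and spring-context (for @Service/@Component) on the classpath;
// the AccountService class and bean name are hypothetical. @Service declares
// its `value` attribute as an @AliasFor for Component#value, which is exactly
// the attribute-override case the merged lookup resolves.
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

class AnnotatedElementUtilsDemo {

	@Service("accountService")
	static class AccountService {
	}

	public static void main(String[] args) {
		// @Component is not directly present on AccountService, but find
		// semantics walk the meta-annotation hierarchy and merge @AliasFor'd
		// attributes into the synthesized @Component instance.
		Component component =
				AnnotatedElementUtils.findMergedAnnotation(AccountService.class, Component.class);
		System.out.println(component.value());  // prints "accountService"
	}
}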
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.runtime.io.checkpointing; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.runtime.checkpoint.CheckpointException; import org.apache.flink.runtime.checkpoint.CheckpointFailureReason; import org.apache.flink.runtime.checkpoint.channel.InputChannelInfo; import org.apache.flink.runtime.io.network.api.CancelCheckpointMarker; import org.apache.flink.runtime.io.network.api.CheckpointBarrier; import org.apache.flink.runtime.io.network.partition.consumer.CheckpointableInput; import org.apache.flink.runtime.jobgraph.tasks.CheckpointableTask; import org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinator; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.clock.Clock; import org.apache.flink.util.concurrent.FutureUtils; import org.apache.flink.util.function.FunctionWithException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import javax.annotation.concurrent.NotThreadSafe; import java.io.IOException; import java.time.Duration; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; import java.util.function.BiFunction; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_DECLINED_INPUT_END_OF_STREAM; import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_DECLINED_SUBSUMED; import static org.apache.flink.util.Preconditions.checkState; /** * {@link SingleCheckpointBarrierHandler} is used for triggering checkpoint while reading the first * barrier and keeping track of the number of received barriers and consumed barriers. It can * handle/track just single checkpoint at a time. The behaviour when to actually trigger the * checkpoint and what the {@link CheckpointableInput} should do is controlled by {@link * BarrierHandlerState}. */ @Internal @NotThreadSafe public class SingleCheckpointBarrierHandler extends CheckpointBarrierHandler { private static final Logger LOG = LoggerFactory.getLogger(SingleCheckpointBarrierHandler.class); private final String taskName; private final ControllerImpl context; private final BiFunction<Callable<?>, Duration, Cancellable> registerTimer; private final SubtaskCheckpointCoordinator subTaskCheckpointCoordinator; private final CheckpointableInput[] inputs; /** * The checkpoint id to guarantee that we would trigger only one checkpoint when reading the * same barrier from different channels. */ private long currentCheckpointId = -1L; /** * The checkpoint barrier of the current pending checkpoint. 
It is to allow us to access the * checkpoint options when processing {@code EndOfPartitionEvent}. */ @Nullable private CheckpointBarrier pendingCheckpointBarrier; private final Set<InputChannelInfo> alignedChannels = new HashSet<>(); private int targetChannelCount; private long lastCancelledOrCompletedCheckpointId = -1L; private int numOpenChannels; private CompletableFuture<Void> allBarriersReceivedFuture = new CompletableFuture<>(); private BarrierHandlerState currentState; private Cancellable currentAlignmentTimer; private final boolean alternating; @VisibleForTesting public static SingleCheckpointBarrierHandler createUnalignedCheckpointBarrierHandler( SubtaskCheckpointCoordinator checkpointCoordinator, String taskName, CheckpointableTask toNotifyOnCheckpoint, Clock clock, boolean enableCheckpointsAfterTasksFinish, CheckpointableInput... inputs) { return unaligned( taskName, toNotifyOnCheckpoint, checkpointCoordinator, clock, (int) Arrays.stream(inputs) .flatMap(gate -> gate.getChannelInfos().stream()) .count(), (callable, duration) -> { throw new IllegalStateException( "Strictly unaligned checkpoints should never register any callbacks"); }, enableCheckpointsAfterTasksFinish, inputs); } public static SingleCheckpointBarrierHandler unaligned( String taskName, CheckpointableTask toNotifyOnCheckpoint, SubtaskCheckpointCoordinator checkpointCoordinator, Clock clock, int numOpenChannels, BiFunction<Callable<?>, Duration, Cancellable> registerTimer, boolean enableCheckpointAfterTasksFinished, CheckpointableInput... inputs) { return new SingleCheckpointBarrierHandler( taskName, toNotifyOnCheckpoint, checkpointCoordinator, clock, numOpenChannels, new AlternatingWaitingForFirstBarrierUnaligned(false, new ChannelState(inputs)), false, registerTimer, inputs, enableCheckpointAfterTasksFinished); } public static SingleCheckpointBarrierHandler aligned( String taskName, CheckpointableTask toNotifyOnCheckpoint, Clock clock, int numOpenChannels, BiFunction<Callable<?>, Duration, Cancellable> registerTimer, boolean enableCheckpointAfterTasksFinished, CheckpointableInput... inputs) { return new SingleCheckpointBarrierHandler( taskName, toNotifyOnCheckpoint, null, clock, numOpenChannels, new WaitingForFirstBarrier(inputs), false, registerTimer, inputs, enableCheckpointAfterTasksFinished); } public static SingleCheckpointBarrierHandler alternating( String taskName, CheckpointableTask toNotifyOnCheckpoint, SubtaskCheckpointCoordinator checkpointCoordinator, Clock clock, int numOpenChannels, BiFunction<Callable<?>, Duration, Cancellable> registerTimer, boolean enableCheckpointAfterTasksFinished, CheckpointableInput... 
inputs) { return new SingleCheckpointBarrierHandler( taskName, toNotifyOnCheckpoint, checkpointCoordinator, clock, numOpenChannels, new AlternatingWaitingForFirstBarrier(new ChannelState(inputs)), true, registerTimer, inputs, enableCheckpointAfterTasksFinished); } private SingleCheckpointBarrierHandler( String taskName, CheckpointableTask toNotifyOnCheckpoint, @Nullable SubtaskCheckpointCoordinator subTaskCheckpointCoordinator, Clock clock, int numOpenChannels, BarrierHandlerState currentState, boolean alternating, BiFunction<Callable<?>, Duration, Cancellable> registerTimer, CheckpointableInput[] inputs, boolean enableCheckpointAfterTasksFinished) { super(toNotifyOnCheckpoint, clock, enableCheckpointAfterTasksFinished); this.taskName = taskName; this.numOpenChannels = numOpenChannels; this.currentState = currentState; this.alternating = alternating; this.registerTimer = registerTimer; this.subTaskCheckpointCoordinator = subTaskCheckpointCoordinator; this.context = new ControllerImpl(); this.inputs = inputs; } @Override public void processBarrier( CheckpointBarrier barrier, InputChannelInfo channelInfo, boolean isRpcTriggered) throws IOException { long barrierId = barrier.getId(); LOG.debug("{}: Received barrier from channel {} @ {}.", taskName, channelInfo, barrierId); if (currentCheckpointId > barrierId || (currentCheckpointId == barrierId && !isCheckpointPending())) { if (!barrier.getCheckpointOptions().isUnalignedCheckpoint()) { inputs[channelInfo.getGateIdx()].resumeConsumption(channelInfo); } return; } checkNewCheckpoint(barrier); checkState(currentCheckpointId == barrierId); markCheckpointAlignedAndTransformState( channelInfo, barrier, state -> state.barrierReceived(context, channelInfo, barrier, !isRpcTriggered)); } protected void markCheckpointAlignedAndTransformState( InputChannelInfo alignedChannel, CheckpointBarrier barrier, FunctionWithException<BarrierHandlerState, BarrierHandlerState, Exception> stateTransformer) throws IOException { alignedChannels.add(alignedChannel); if (alignedChannels.size() == 1) { if (targetChannelCount == 1) { markAlignmentStartAndEnd(barrier.getId(), barrier.getTimestamp()); } else { markAlignmentStart(barrier.getId(), barrier.getTimestamp()); } } // we must mark alignment end before calling currentState.barrierReceived which might // trigger a checkpoint with unfinished future for alignment duration if (alignedChannels.size() == targetChannelCount) { if (targetChannelCount > 1) { markAlignmentEnd(); } } try { currentState = stateTransformer.apply(currentState); } catch (CheckpointException e) { abortInternal(currentCheckpointId, e); } catch (Exception e) { ExceptionUtils.rethrowIOException(e); } if (alignedChannels.size() == targetChannelCount) { alignedChannels.clear(); lastCancelledOrCompletedCheckpointId = currentCheckpointId; LOG.debug( "{}: All the channels are aligned for checkpoint {}.", taskName, currentCheckpointId); resetAlignmentTimer(); allBarriersReceivedFuture.complete(null); } } private void triggerCheckpoint(CheckpointBarrier trigger) throws IOException { LOG.debug( "{}: Triggering checkpoint {} on the barrier announcement at {}.", taskName, trigger.getId(), trigger.getTimestamp()); notifyCheckpoint(trigger); } @Override public void processBarrierAnnouncement( CheckpointBarrier announcedBarrier, int sequenceNumber, InputChannelInfo channelInfo) throws IOException { checkNewCheckpoint(announcedBarrier); long barrierId = announcedBarrier.getId(); if (currentCheckpointId > barrierId || (currentCheckpointId == barrierId && 
                        !isCheckpointPending())) {
            LOG.debug(
                    "{}: Obsolete announcement of checkpoint {} for channel {}.",
                    taskName,
                    barrierId,
                    channelInfo);
            return;
        }
        currentState = currentState.announcementReceived(context, channelInfo, sequenceNumber);
    }

    private void registerAlignmentTimer(CheckpointBarrier announcedBarrier) {
        long alignedCheckpointTimeout =
                announcedBarrier.getCheckpointOptions().getAlignedCheckpointTimeout();
        long timePassedSinceCheckpointStart =
                getClock().absoluteTimeMillis() - announcedBarrier.getTimestamp();
        long timerDelay = Math.max(alignedCheckpointTimeout - timePassedSinceCheckpointStart, 0);
        this.currentAlignmentTimer =
                registerTimer.apply(
                        () -> {
                            long barrierId = announcedBarrier.getId();
                            try {
                                if (currentCheckpointId == barrierId
                                        && !getAllBarriersReceivedFuture(barrierId).isDone()) {
                                    currentState =
                                            currentState.alignmentTimeout(context, announcedBarrier);
                                }
                            } catch (CheckpointException ex) {
                                this.abortInternal(barrierId, ex);
                            } catch (Exception e) {
                                ExceptionUtils.rethrowIOException(e);
                            }
                            currentAlignmentTimer = null;
                            return null;
                        },
                        Duration.ofMillis(timerDelay));
    }

    private void checkNewCheckpoint(CheckpointBarrier barrier) throws IOException {
        long barrierId = barrier.getId();
        if (currentCheckpointId >= barrierId) {
            return; // This barrier is not the first for this checkpoint.
        }
        if (isCheckpointPending()) {
            cancelSubsumedCheckpoint(barrierId);
        }
        currentCheckpointId = barrierId;
        pendingCheckpointBarrier = barrier;
        alignedChannels.clear();
        targetChannelCount = numOpenChannels;
        allBarriersReceivedFuture = new CompletableFuture<>();
        if (alternating && barrier.getCheckpointOptions().isTimeoutable()) {
            registerAlignmentTimer(barrier);
        }
    }

    @Override
    public void processCancellationBarrier(
            CancelCheckpointMarker cancelBarrier, InputChannelInfo channelInfo)
            throws IOException {
        final long cancelledId = cancelBarrier.getCheckpointId();
        if (cancelledId > currentCheckpointId
                || (cancelledId == currentCheckpointId && alignedChannels.size() > 0)) {
            LOG.debug("{}: Received cancellation {}.", taskName, cancelledId);
            abortInternal(
                    cancelledId,
                    new CheckpointException(
                            CheckpointFailureReason.CHECKPOINT_DECLINED_ON_CANCELLATION_BARRIER));
        }
    }

    private void abortInternal(long cancelledId, CheckpointFailureReason reason)
            throws IOException {
        abortInternal(cancelledId, new CheckpointException(reason));
    }

    private void abortInternal(long cancelledId, CheckpointException exception)
            throws IOException {
        LOG.debug(
                "{}: Aborting checkpoint {} after exception {}.",
                taskName,
                currentCheckpointId,
                exception);
        // Setting currentCheckpointId to this checkpoint while keeping the barrier count at
        // zero ensures that no checkpoint barrier can start a new alignment.
        currentCheckpointId = Math.max(cancelledId, currentCheckpointId);
        lastCancelledOrCompletedCheckpointId =
                Math.max(lastCancelledOrCompletedCheckpointId, cancelledId);
        pendingCheckpointBarrier = null;
        alignedChannels.clear();
        targetChannelCount = 0;
        resetAlignmentTimer();
        currentState = currentState.abort(cancelledId);
        if (cancelledId == currentCheckpointId) {
            resetAlignment();
        }
        notifyAbort(cancelledId, exception);
        allBarriersReceivedFuture.completeExceptionally(exception);
    }

    private void resetAlignmentTimer() {
        if (currentAlignmentTimer != null) {
            currentAlignmentTimer.cancel();
            currentAlignmentTimer = null;
        }
    }

    @Override
    public void processEndOfPartition(InputChannelInfo channelInfo) throws IOException {
        numOpenChannels--;
        if (!isCheckpointAfterTasksFinishedEnabled()) {
            if (isCheckpointPending()) {
                LOG.warn(
                        "{}: Received EndOfPartition(-1) before completing current checkpoint {}. "
                                + "Skipping current checkpoint.",
                        taskName,
                        currentCheckpointId);
                abortInternal(currentCheckpointId, CHECKPOINT_DECLINED_INPUT_END_OF_STREAM);
            }
        } else {
            if (!isCheckpointPending()) {
                return;
            }
            checkState(
                    pendingCheckpointBarrier != null,
                    "pending checkpoint barrier should not be null when"
                            + " there is a pending checkpoint.");
            markCheckpointAlignedAndTransformState(
                    channelInfo,
                    pendingCheckpointBarrier,
                    state -> state.endOfPartitionReceived(context, channelInfo));
        }
    }

    @Override
    public long getLatestCheckpointId() {
        return currentCheckpointId;
    }

    @Override
    public void close() throws IOException {
        resetAlignmentTimer();
        allBarriersReceivedFuture.cancel(false);
        super.close();
    }

    @Override
    protected boolean isCheckpointPending() {
        return currentCheckpointId != lastCancelledOrCompletedCheckpointId
                && currentCheckpointId >= 0;
    }

    private void cancelSubsumedCheckpoint(long barrierId) throws IOException {
        LOG.warn(
                "{}: Received checkpoint barrier for checkpoint {} before completing current checkpoint {}. "
                        + "Skipping current checkpoint.",
                taskName,
                barrierId,
                currentCheckpointId);
        abortInternal(currentCheckpointId, CHECKPOINT_DECLINED_SUBSUMED);
    }

    public CompletableFuture<Void> getAllBarriersReceivedFuture(long checkpointId) {
        if (checkpointId < currentCheckpointId || numOpenChannels == 0) {
            return FutureUtils.completedVoidFuture();
        }
        if (checkpointId > currentCheckpointId) {
            throw new IllegalStateException(
                    "Checkpoint " + checkpointId + " has not been started at all");
        }
        return allBarriersReceivedFuture;
    }

    @VisibleForTesting
    int getNumOpenChannels() {
        return numOpenChannels;
    }

    @Override
    public String toString() {
        return String.format(
                "%s: current checkpoint: %d, current aligned channels: %d, target channel count: %d",
                taskName, currentCheckpointId, alignedChannels.size(), targetChannelCount);
    }

    private final class ControllerImpl implements BarrierHandlerState.Controller {
        @Override
        public void triggerGlobalCheckpoint(CheckpointBarrier checkpointBarrier)
                throws IOException {
            SingleCheckpointBarrierHandler.this.triggerCheckpoint(checkpointBarrier);
        }

        @Override
        public boolean isTimedOut(CheckpointBarrier barrier) {
            return barrier.getCheckpointOptions().isTimeoutable()
                    && barrier.getId() <= currentCheckpointId
                    && barrier.getCheckpointOptions().getAlignedCheckpointTimeout()
                            < (getClock().absoluteTimeMillis() - barrier.getTimestamp());
        }

        @Override
        public boolean allBarriersReceived() {
            return alignedChannels.size() == targetChannelCount;
        }

        @Nullable
        @Override
        public CheckpointBarrier getPendingCheckpointBarrier() {
            return pendingCheckpointBarrier;
        }

        @Override
        public void initInputsCheckpoint(CheckpointBarrier checkpointBarrier)
                throws CheckpointException {
            checkState(subTaskCheckpointCoordinator != null);
            long barrierId = checkpointBarrier.getId();
            subTaskCheckpointCoordinator.initInputsCheckpoint(
                    barrierId, checkpointBarrier.getCheckpointOptions());
        }
    }
}
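// Illustrative sketch (not part of the Flink sources): the alignment-timeout
// arithmetic used by registerAlignmentTimer() above. The timeout is measured
// from the checkpoint's trigger timestamp carried in the barrier, so the time
// the barrier already spent in flight is subtracted and the delay is clamped
// at zero; a barrier that arrives after the timeout has already elapsed times
// out immediately. All values below are hypothetical.
class AlignmentTimerDelayDemo {

    static long timerDelay(long alignedCheckpointTimeoutMillis, long barrierTimestamp, long nowMillis) {
        long timePassedSinceCheckpointStart = nowMillis - barrierTimestamp;
        return Math.max(alignedCheckpointTimeoutMillis - timePassedSinceCheckpointStart, 0);
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        // Barrier triggered 3s ago with a 10s aligned-checkpoint timeout: 7s remain.
        System.out.println(timerDelay(10_000, now - 3_000, now));   // 7000
        // Barrier triggered 15s ago: the timeout already elapsed, fire immediately.
        System.out.println(timerDelay(10_000, now - 15_000, now));  // 0
    }
}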
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.raptor.systemtables; import com.facebook.presto.spi.predicate.Domain; import com.facebook.presto.spi.predicate.Range; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import static com.facebook.presto.raptor.metadata.JdbcUtil.enableStreamingResults; import static com.facebook.presto.raptor.util.UuidUtil.uuidToBytes; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Strings.isNullOrEmpty; import static com.google.common.collect.Iterables.getOnlyElement; import static java.lang.String.format; import static java.sql.ResultSet.CONCUR_READ_ONLY; import static java.sql.ResultSet.TYPE_FORWARD_ONLY; import static java.util.Collections.nCopies; import static java.util.UUID.fromString; public class PreparedStatementBuilder { private PreparedStatementBuilder() {} public static PreparedStatement create( Connection connection, String sql, List<String> columnNames, List<Type> types, Set<Integer> uuidColumnIndexes, TupleDomain<Integer> tupleDomain) throws SQLException { checkArgument(!isNullOrEmpty(sql), "sql is null or empty"); List<ValueBuffer> bindValues = new ArrayList<>(256); sql = sql + getWhereClause(tupleDomain, columnNames, types, uuidColumnIndexes, bindValues); PreparedStatement statement = connection.prepareStatement(sql, TYPE_FORWARD_ONLY, CONCUR_READ_ONLY); enableStreamingResults(statement); // bind values to statement int bindIndex = 1; for (ValueBuffer value : bindValues) { bindField(value, statement, bindIndex, uuidColumnIndexes.contains(value.getColumnIndex())); bindIndex++; } return statement; } private static String getWhereClause( TupleDomain<Integer> tupleDomain, List<String> columnNames, List<Type> types, Set<Integer> uuidColumnIndexes, List<ValueBuffer> bindValues) { if (tupleDomain.isNone()) { return ""; } ImmutableList.Builder<String> conjunctsBuilder = ImmutableList.builder(); Map<Integer, Domain> domainMap = tupleDomain.getDomains().get(); for (Map.Entry<Integer, Domain> entry : domainMap.entrySet()) { int index = entry.getKey(); String columnName = columnNames.get(index); Type type = types.get(index); conjunctsBuilder.add(toPredicate(index, columnName, type, entry.getValue(), uuidColumnIndexes, bindValues)); } 
        List<String> conjuncts = conjunctsBuilder.build();
        if (conjuncts.isEmpty()) {
            return "";
        }
        StringBuilder where = new StringBuilder("WHERE ");
        return Joiner.on(" AND\n").appendTo(where, conjuncts).toString();
    }

    private static String toPredicate(
            int columnIndex,
            String columnName,
            Type type,
            Domain domain,
            Set<Integer> uuidColumnIndexes,
            List<ValueBuffer> bindValues)
    {
        if (domain.getValues().isAll()) {
            return domain.isNullAllowed() ? "TRUE" : columnName + " IS NOT NULL";
        }
        if (domain.getValues().isNone()) {
            return domain.isNullAllowed() ? columnName + " IS NULL" : "FALSE";
        }

        return domain.getValues().getValuesProcessor().transform(
                ranges -> {
                    List<String> disjuncts = new ArrayList<>();
                    List<Object> singleValues = new ArrayList<>();

                    // Add disjuncts for ranges
                    for (Range range : ranges.getOrderedRanges()) {
                        checkState(!range.isAll()); // Already checked
                        if (range.isSingleValue()) {
                            singleValues.add(range.getLow().getValue());
                        }
                        else {
                            List<String> rangeConjuncts = new ArrayList<>();
                            if (!range.getLow().isLowerUnbounded()) {
                                Object bindValue = getBindValue(columnIndex, uuidColumnIndexes, range.getLow().getValue());
                                switch (range.getLow().getBound()) {
                                    case ABOVE:
                                        rangeConjuncts.add(toBindPredicate(columnName, ">"));
                                        bindValues.add(ValueBuffer.create(columnIndex, type, bindValue));
                                        break;
                                    case EXACTLY:
                                        rangeConjuncts.add(toBindPredicate(columnName, ">="));
                                        bindValues.add(ValueBuffer.create(columnIndex, type, bindValue));
                                        break;
                                    case BELOW:
                                        throw new IllegalStateException("Low Marker should never use BELOW bound: " + range);
                                    default:
                                        throw new AssertionError("Unhandled bound: " + range.getLow().getBound());
                                }
                            }
                            if (!range.getHigh().isUpperUnbounded()) {
                                Object bindValue = getBindValue(columnIndex, uuidColumnIndexes, range.getHigh().getValue());
                                switch (range.getHigh().getBound()) {
                                    case ABOVE:
                                        throw new IllegalStateException("High Marker should never use ABOVE bound: " + range);
                                    case EXACTLY:
                                        rangeConjuncts.add(toBindPredicate(columnName, "<="));
                                        bindValues.add(ValueBuffer.create(columnIndex, type, bindValue));
                                        break;
                                    case BELOW:
                                        rangeConjuncts.add(toBindPredicate(columnName, "<"));
                                        bindValues.add(ValueBuffer.create(columnIndex, type, bindValue));
                                        break;
                                    default:
                                        throw new AssertionError("Unhandled bound: " + range.getHigh().getBound());
                                }
                            }
                            // If rangeConjuncts is empty, then the range was ALL, which should already have been checked for
                            checkState(!rangeConjuncts.isEmpty());
                            disjuncts.add("(" + Joiner.on(" AND ").join(rangeConjuncts) + ")");
                        }
                    }

                    // Add back all of the possible single values either as an equality or an IN predicate
                    if (singleValues.size() == 1) {
                        disjuncts.add(toBindPredicate(columnName, "="));
                        bindValues.add(ValueBuffer.create(columnIndex, type, getBindValue(columnIndex, uuidColumnIndexes, getOnlyElement(singleValues))));
                    }
                    else if (singleValues.size() > 1) {
                        disjuncts.add(columnName + " IN (" + Joiner.on(",").join(nCopies(singleValues.size(), "?")) + ")");
                        for (Object singleValue : singleValues) {
                            bindValues.add(ValueBuffer.create(columnIndex, type, getBindValue(columnIndex, uuidColumnIndexes, singleValue)));
                        }
                    }

                    // Add nullability disjuncts
                    checkState(!disjuncts.isEmpty());
                    if (domain.isNullAllowed()) {
                        disjuncts.add(columnName + " IS NULL");
                    }

                    return "(" + Joiner.on(" OR ").join(disjuncts) + ")";
                },
                discreteValues -> {
                    String values = Joiner.on(",").join(nCopies(discreteValues.getValues().size(), "?"));
                    String predicate = columnName + (discreteValues.isWhiteList() ?
"" : " NOT") + " IN (" + values + ")"; for (Object value : discreteValues.getValues()) { bindValues.add(ValueBuffer.create(columnIndex, type, getBindValue(columnIndex, uuidColumnIndexes, value))); } if (domain.isNullAllowed()) { predicate = "(" + predicate + " OR " + columnName + " IS NULL)"; } return predicate; }, allOrNone -> { throw new IllegalStateException("Case should not be reachable"); }); } private static Object getBindValue(int columnIndex, Set<Integer> uuidColumnIndexes, Object value) { if (uuidColumnIndexes.contains(columnIndex)) { return uuidToBytes(fromString(((Slice) value).toStringUtf8())); } return value; } private static String toBindPredicate(String columnName, String operator) { return format("%s %s ?", columnName, operator); } private static void bindField(ValueBuffer valueBuffer, PreparedStatement preparedStatement, int parameterIndex, boolean isUuid) throws SQLException { Type type = valueBuffer.getType(); if (valueBuffer.isNull()) { preparedStatement.setNull(parameterIndex, typeToSqlType(type)); } else if (type.getJavaType() == long.class) { preparedStatement.setLong(parameterIndex, valueBuffer.getLong()); } else if (type.getJavaType() == double.class) { preparedStatement.setDouble(parameterIndex, valueBuffer.getDouble()); } else if (type.getJavaType() == boolean.class) { preparedStatement.setBoolean(parameterIndex, valueBuffer.getBoolean()); } else if (type.getJavaType() == Slice.class && isUuid) { preparedStatement.setBytes(parameterIndex, valueBuffer.getSlice().getBytes()); } else if (type.getJavaType() == Slice.class) { preparedStatement.setString(parameterIndex, new String(valueBuffer.getSlice().getBytes())); } else { throw new IllegalArgumentException("Unknown Java type: " + type.getJavaType()); } } private static int typeToSqlType(Type type) { if (type == BIGINT) { return Types.BIGINT; } if (type == DOUBLE) { return Types.DOUBLE; } if (type == BOOLEAN) { return Types.BOOLEAN; } if (type == VARCHAR) { return Types.VARCHAR; } if (type == VARBINARY) { return Types.VARBINARY; } throw new IllegalArgumentException("Unknown type: " + type); } }
/*- * -\-\- * async-google-pubsub-client * -- * Copyright (C) 2016 - 2017 Spotify AB * -- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -/-/- */ /* * Copyright (c) 2011-2015 Spotify AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.spotify.google.cloud.pubsub.client; import com.google.common.util.concurrent.MoreExecutors; import com.swrve.ratelimitedlogger.RateLimitedLog; import org.joda.time.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; import java.util.Objects; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletionStage; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.atomic.AtomicInteger; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; public class Puller implements Closeable { /** * A handler for received messages. */ public interface MessageHandler { /** * Called when a {@link Puller} receives a message. * * @param puller The {@link Puller} * @param subscription The subscription that the message was received on. * @param message The message. * @param ackId The ack id. * @return A future that should be completed with the ack id when the message has been consumed. 
*/ CompletionStage<String> handleMessage(Puller puller, String subscription, Message message, String ackId); } private static final int MAX_LOG_RATE = 3; private static final Duration MAX_LOG_DURATION = Duration.millis(2000); private static final Logger logger = LoggerFactory.getLogger(Puller.class); private static final Logger LOG = RateLimitedLog.withRateLimit(logger) .maxRate(MAX_LOG_RATE) .every(MAX_LOG_DURATION) .build(); private final ScheduledExecutorService scheduler = MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1)); private final Acker acker; private final Pubsub pubsub; private final String project; private final String subscription; private final MessageHandler handler; private final int concurrency; private final int batchSize; private final int maxOutstandingMessages; private final int maxAckQueueSize; private final long pullIntervalMillis; private final Backoff backoff; private final AtomicInteger outstandingRequests = new AtomicInteger(); private final AtomicInteger outstandingMessages = new AtomicInteger(); public Puller(final Builder builder) { this.pubsub = Objects.requireNonNull(builder.pubsub, "pubsub"); this.project = Objects.requireNonNull(builder.project, "project"); this.subscription = Objects.requireNonNull(builder.subscription, "subscription"); this.handler = Objects.requireNonNull(builder.handler, "handler"); this.concurrency = builder.concurrency; this.batchSize = builder.batchSize; this.maxOutstandingMessages = builder.maxOutstandingMessages; this.maxAckQueueSize = builder.maxAckQueueSize; this.pullIntervalMillis = builder.pullIntervalMillis; this.backoff = Backoff.builder() .initialInterval(builder.pullIntervalMillis) .maxBackoffMultiplier(builder.maxBackoffMultiplier) .build(); // Set up a batching acker for sending acks this.acker = Acker.builder() .pubsub(pubsub) .project(project) .subscription(subscription) .batchSize(batchSize) .concurrency(concurrency) .queueSize(maxAckQueueSize) .build(); // Start pulling pull(); // Schedule pulling to compensate for failures and exceeding the outstanding message limit scheduler.scheduleWithFixedDelay(this::pull, pullIntervalMillis, pullIntervalMillis, MILLISECONDS); } @Override public void close() throws IOException { scheduler.shutdownNow(); try { scheduler.awaitTermination(30, SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } acker.close(); } public int maxAckQueueSize() { return maxAckQueueSize; } public int maxOutstandingMessages() { return maxOutstandingMessages; } public int outstandingMessages() { return outstandingMessages.get(); } public int concurrency() { return concurrency; } public int outstandingRequests() { return outstandingRequests.get(); } public int batchSize() { return batchSize; } public String subscription() { return subscription; } public String project() { return project; } public long pullIntervalMillis() { return pullIntervalMillis; } private void pull() { while (outstandingRequests.get() < concurrency && outstandingMessages.get() < maxOutstandingMessages) { pullBatch(); } } private void pullBatch() { outstandingRequests.incrementAndGet(); pubsub.pull(project, subscription, true, batchSize) .whenComplete((messages, ex) -> { outstandingRequests.decrementAndGet(); // Bail if pull failed if (ex != null) { if ( ex instanceof RequestFailedException && ((RequestFailedException)ex).statusCode() == 429 ) { LOG.debug("Going too fast, backing off"); } else { LOG.error("Pull failed", ex); } backoff.sleep(); return; } // we are good. 
          // Let's go at full speed again.
          backoff.reset();

          // Add entire batch to outstanding message count
          outstandingMessages.addAndGet(messages.size());

          // Call handler for each received message
          for (final ReceivedMessage message : messages) {
            final CompletionStage<String> handlerFuture;
            try {
              handlerFuture = handler.handleMessage(this, subscription, message.message(), message.ackId());
            } catch (Throwable t) {
              outstandingMessages.decrementAndGet();
              LOG.error("Message handler threw exception", t);
              continue;
            }

            if (handlerFuture == null) {
              outstandingMessages.decrementAndGet();
              LOG.error("Message handler returned null");
              continue;
            }

            // Decrement the number of outstanding messages when handling is complete
            handlerFuture.whenComplete((ignore, throwable) -> outstandingMessages.decrementAndGet());

            // Ack when the message handling successfully completes
            handlerFuture.thenAccept(acker::acknowledge).exceptionally(throwable -> {
              if (!(throwable instanceof CancellationException)) {
                LOG.error("Acking pubsub threw exception", throwable);
              }
              return null;
            });
          }
        });
  }

  /**
   * Create a builder that can be used to build a {@link Puller}.
   */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * A builder that can be used to build a {@link Puller}.
   */
  public static class Builder {

    private Pubsub pubsub;
    private String project;
    private String subscription;
    private MessageHandler handler;
    private int concurrency = 64;
    private int batchSize = 1000;
    private int maxOutstandingMessages = 64_000;
    private int maxAckQueueSize = 10 * batchSize;
    private long pullIntervalMillis = 1000;
    private int maxBackoffMultiplier = 0;

    /**
     * Set the {@link Pubsub} client to use. The client will be closed when this {@link Puller} is closed.
     *
     * <p>Note: The client should be configured to at least allow as many connections as the concurrency level of
     * this {@link Puller}.</p>
     */
    public Builder pubsub(final Pubsub pubsub) {
      this.pubsub = pubsub;
      return this;
    }

    /**
     * Set the Google Cloud project to pull from.
     */
    public Builder project(final String project) {
      this.project = project;
      return this;
    }

    /**
     * The subscription to pull from.
     */
    public Builder subscription(final String subscription) {
      this.subscription = subscription;
      return this;
    }

    /**
     * The handler to call for received messages.
     */
    public Builder messageHandler(final MessageHandler messageHandler) {
      this.handler = messageHandler;
      return this;
    }

    /**
     * Set the Google Cloud Pub/Sub request concurrency level. Default is {@code 64}.
     */
    public Builder concurrency(final int concurrency) {
      this.concurrency = concurrency;
      return this;
    }

    /**
     * Set the Google Cloud Pub/Sub pull batch size. Default is {@code 1000}.
     */
    public Builder batchSize(final int batchSize) {
      this.batchSize = batchSize;
      return this;
    }

    /**
     * Set the limit of outstanding messages pending handling. Pulling is throttled when this limit is hit.
     * Default is {@code 64000}.
     */
    public Builder maxOutstandingMessages(final int maxOutstandingMessages) {
      this.maxOutstandingMessages = maxOutstandingMessages;
      return this;
    }

    /**
     * Set the max size for the queue of acks back to Google Cloud Pub/Sub. Default is {@code 10 * batchSize}.
     */
    public Builder maxAckQueueSize(final int maxAckQueueSize) {
      this.maxAckQueueSize = maxAckQueueSize;
      return this;
    }

    /**
     * Set the pull interval in millis. Default is {@code 1000} millis.
     */
    public Builder pullIntervalMillis(final long pullIntervalMillis) {
      this.pullIntervalMillis = pullIntervalMillis;
      return this;
    }

    /**
     * Set the maximum backoff multiplier. Default is {@code 0} (no backoff).
*/ public Builder maxBackoffMultiplier(final int maxBackoffMultiplier) { this.maxBackoffMultiplier = maxBackoffMultiplier; return this; } /** * Build a {@link Puller}. */ public Puller build() { return new Puller(this); } } }
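/*
 * Usage sketch (not part of the library above): a minimal example of wiring up the
 * Puller. The Pubsub client construction and the project/subscription names are
 * assumptions for illustration; the builder calls and the MessageHandler contract
 * (complete the returned future with the ackId to have the message acked) mirror
 * the code in this file.
 */
class PullerUsageExample {
  public static void main(final String... args) throws Exception {
    final Pubsub pubsub = Pubsub.builder().build(); // assumed client factory
    final Puller puller = Puller.builder()
        .pubsub(pubsub)
        .project("my-gcp-project")        // hypothetical project id
        .subscription("my-subscription")  // hypothetical subscription name
        .concurrency(16)
        .batchSize(100)
        .messageHandler((p, sub, message, ackId) ->
            // Completing the future with the ackId signals success; the puller
            // then hands the ackId to its batching Acker and decrements the
            // outstanding message count.
            java.util.concurrent.CompletableFuture.completedFuture(ackId))
        .build();
    Thread.sleep(60_000); // pull for a minute, then shut down
    puller.close();
  }
}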
/* * Copyright 2012 Last.fm * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fm.last.moji.tracker.impl; import static fm.last.moji.tracker.impl.ErrorCode.KEY_EXISTS; import static fm.last.moji.tracker.impl.ErrorCode.UNKNOWN_CLASS; import static fm.last.moji.tracker.impl.ErrorCode.UNKNOWN_COMMAND; import static fm.last.moji.tracker.impl.ErrorCode.UNKNOWN_KEY; import static fm.last.moji.tracker.impl.ResponseStatus.OK; import java.net.Socket; import java.net.URL; import java.util.List; import java.util.Map; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import fm.last.moji.tracker.Destination; import fm.last.moji.tracker.KeyExistsAlreadyException; import fm.last.moji.tracker.Tracker; import fm.last.moji.tracker.TrackerException; import fm.last.moji.tracker.UnknownCommandException; import fm.last.moji.tracker.UnknownKeyException; import fm.last.moji.tracker.UnknownStorageClassException; import fm.last.moji.tracker.impl.Request.Builder; class TrackerImpl implements Tracker { private static final Logger log = LoggerFactory.getLogger(TrackerImpl.class); private final Socket socket; private final RequestHandler requestHandler; public TrackerImpl(Socket socket, RequestHandler requestHandler) { this.socket = socket; this.requestHandler = requestHandler; } @Override public List<URL> getPaths(String key, String domain) throws TrackerException { GetPathsOperation operation = new GetPathsOperation(requestHandler, domain, key, false); operation.execute(); return operation.getPaths(); } @Override public Map<String, String> fileInfo(String key, String domain) throws TrackerException { String command = "file_info"; Request request = new Request.Builder(10).command(command).arg("domain", domain).arg("key", key).build(); Response response = requestHandler.performRequest(request); if (response.getStatus() != OK) { String message = response.getMessage(); handleUnknownKeyException(key, domain, message); handleUnknownCommandException(command, message); throw new TrackerException(message); } return response.getValueMap(); } @Override public List<Destination> createOpen(String key, String domain, String storageClass) throws TrackerException { CreateOpenOperation operation = new CreateOpenOperation(requestHandler, domain, key, storageClass, true); operation.execute(); return operation.getDestinations(); } @Override public List<String> list(String domain, String keyPrefix, Integer limit) throws TrackerException { ListKeysOperation operation = new ListKeysOperation(requestHandler, domain, keyPrefix, limit); operation.execute(); return operation.getKeys(); } @Override public void createClose(String key, String domain, Destination destination, long size) throws TrackerException { Request request = new Builder(6).command("create_close").arg("domain", domain).arg("key", key) .arg("devid", destination.getDevId()).arg("path", destination.getPath()).arg("fid", destination.getFid()) .arg("size", size).build(); Response response = requestHandler.performRequest(request); 
handleGeneralResponseError(response); } @Override public void delete(String key, String domain) throws TrackerException { Request request = new Request.Builder(2).command("delete").arg("domain", domain).arg("key", key).build(); Response response = requestHandler.performRequest(request); if (response.getStatus() != OK) { String message = response.getMessage(); handleUnknownKeyException(key, domain, message); throw new TrackerException(message); } } @Override public void rename(String fromKey, String domain, String toKey) throws TrackerException { Request request = new Request.Builder(3).command("rename").arg("domain", domain).arg("from_key", fromKey) .arg("to_key", toKey).build(); Response response = requestHandler.performRequest(request); if (response.getStatus() != OK) { String message = response.getMessage(); handleUnknownKeyException(fromKey, domain, message); handleKeyAlreadyExists(domain, toKey, message); throw new TrackerException(message); } } @Override public void updateStorageClass(String key, String domain, String newStorageClass) throws TrackerException { Request request = new Request.Builder(3).command("updateclass").arg("domain", domain).arg("key", key) .arg("class", newStorageClass).build(); Response response = requestHandler.performRequest(request); if (response.getStatus() != OK) { String message = response.getMessage(); handleUnknownKeyException(key, domain, message); handleUnknownStorageClass(newStorageClass, message); throw new TrackerException(message); } } @Override public Map<String, String> getDevicesStatus(String domain) throws TrackerException { Request request = new Request.Builder(3).command("get_devices").arg("domain", domain).build(); Response response = requestHandler.performRequest(request); if (response.getStatus() != ResponseStatus.OK) { String message = response.getMessage(); throw new TrackerException(message); } return response.getValueMap(); } @Override public void noop() throws TrackerException { Request request = new Request.Builder(0).command("noop").build(); Response response = requestHandler.performRequest(request); handleGeneralResponseError(response); } @Override public void close() { if (requestHandler != null) { requestHandler.close(); } IOUtils.closeQuietly(socket); log.debug("Closed"); } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("TrackerImpl [socket="); builder.append(socket); builder.append(", requestHandler="); builder.append(requestHandler); builder.append("]"); return builder.toString(); } private void handleUnknownStorageClass(String storageClass, String message) throws UnknownStorageClassException { if (UNKNOWN_CLASS.isContainedInLine(message)) { throw new UnknownStorageClassException(storageClass); } } private void handleKeyAlreadyExists(String domain, String key, String message) throws KeyExistsAlreadyException { if (KEY_EXISTS.isContainedInLine(message)) { throw new KeyExistsAlreadyException(domain, key); } } private void handleUnknownKeyException(String key, String domain, String message) throws UnknownKeyException { if (UNKNOWN_KEY.isContainedInLine(message)) { throw new UnknownKeyException(domain, key); } } private void handleUnknownCommandException(String command, String message) throws UnknownCommandException { if (UNKNOWN_COMMAND.isContainedInLine(message)) { throw new UnknownCommandException(command); } } private void handleGeneralResponseError(Response response) throws TrackerException { if (response.getStatus() != OK) { throw new TrackerException(response.getMessage()); 
} } }
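/*
 * Usage sketch (illustrative only): how a caller might drive the TrackerImpl above.
 * Obtaining the Tracker is left abstract here -- in Moji a tracker factory normally
 * supplies it -- but the rename/delete calls and the KeyExistsAlreadyException
 * recovery path follow the error handling implemented in this file.
 */
class TrackerUsageExample {
  void moveKey(Tracker tracker, String domain) throws TrackerException {
    try {
      tracker.rename("old-key", domain, "new-key");
    } catch (KeyExistsAlreadyException e) {
      // Destination key is taken: remove it, then retry the rename once.
      tracker.delete("new-key", domain);
      tracker.rename("old-key", domain, "new-key");
    } finally {
      tracker.close();
    }
  }
}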
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.spark; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.spark.SparkContext; import org.apache.spark.SparkRBackend; import org.apache.spark.api.java.JavaSparkContext; import org.apache.zeppelin.interpreter.AbstractInterpreter; import org.apache.zeppelin.interpreter.BaseZeppelinContext; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterException; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.scheduler.Scheduler; import org.apache.zeppelin.scheduler.SchedulerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.concurrent.atomic.AtomicBoolean; import static org.apache.zeppelin.spark.ZeppelinRDisplay.render; /** * R and SparkR interpreter with visualization support. 
*/ public class SparkRInterpreter extends AbstractInterpreter { private static final Logger logger = LoggerFactory.getLogger(SparkRInterpreter.class); private String renderOptions; private SparkInterpreter sparkInterpreter; private boolean isSpark2; private ZeppelinR zeppelinR; private AtomicBoolean rbackendDead = new AtomicBoolean(false); private SparkContext sc; private JavaSparkContext jsc; public SparkRInterpreter(Properties property) { super(property); } @Override public void open() throws InterpreterException { String rCmdPath = getProperty("zeppelin.R.cmd", "R"); String sparkRLibPath; if (System.getenv("SPARK_HOME") != null) { // local or yarn-client mode when SPARK_HOME is specified sparkRLibPath = System.getenv("SPARK_HOME") + "/R/lib"; } else if (System.getenv("ZEPPELIN_HOME") != null) { // embedded mode when SPARK_HOME is not specified sparkRLibPath = System.getenv("ZEPPELIN_HOME") + "/interpreter/spark/R/lib"; // workaround to make sparkr work without SPARK_HOME System.setProperty("spark.test.home", System.getenv("ZEPPELIN_HOME") + "/interpreter/spark"); } else { // yarn-cluster mode sparkRLibPath = "sparkr"; } if (!new File(sparkRLibPath).exists()) { throw new InterpreterException(String.format("sparkRLib %s doesn't exist", sparkRLibPath)); } this.sparkInterpreter = getInterpreterInTheSameSessionByClassName(SparkInterpreter.class); this.sc = sparkInterpreter.getSparkContext(); this.jsc = sparkInterpreter.getJavaSparkContext(); // Share the same SparkRBackend across sessions SparkVersion sparkVersion = new SparkVersion(sc.version()); synchronized (SparkRBackend.backend()) { if (!SparkRBackend.isStarted()) { SparkRBackend.init(sparkVersion); SparkRBackend.start(); } } this.isSpark2 = sparkVersion.newerThanEquals(SparkVersion.SPARK_2_0_0); int timeout = this.sc.getConf().getInt("spark.r.backendConnectionTimeout", 6000); ZeppelinRContext.setSparkContext(sc); ZeppelinRContext.setJavaSparkContext(jsc); if (isSpark2) { ZeppelinRContext.setSparkSession(sparkInterpreter.getSparkSession()); } ZeppelinRContext.setSqlContext(sparkInterpreter.getSQLContext()); ZeppelinRContext.setZeppelinContext(sparkInterpreter.getZeppelinContext()); zeppelinR = new ZeppelinR(rCmdPath, sparkRLibPath, SparkRBackend.port(), sparkVersion, timeout, this); try { zeppelinR.open(); logger.info("ZeppelinR opened successfully."); } catch (IOException e) { throw new InterpreterException("Exception while opening SparkRInterpreter", e); } if (useKnitr()) { zeppelinR.eval("library('knitr')"); } renderOptions = getProperty("zeppelin.R.render.options", "out.format = 'html', comment = NA, echo = FALSE, results = 'asis', message = F, " + "warning = F, fig.retina = 2"); } @Override public InterpreterResult internalInterpret(String lines, InterpreterContext interpreterContext) throws InterpreterException { Utils.printDeprecateMessage(sparkInterpreter.getSparkVersion(), interpreterContext, properties); String jobGroup = Utils.buildJobGroupId(interpreterContext); String jobDesc = Utils.buildJobDesc(interpreterContext); sparkInterpreter.getSparkContext().setJobGroup(jobGroup, jobDesc, false); String imageWidth = getProperty("zeppelin.R.image.width", "100%"); if (interpreterContext.getLocalProperties().containsKey("imageWidth")) { imageWidth = interpreterContext.getLocalProperties().get("imageWidth"); } String setJobGroup = ""; // assign setJobGroup to dummy__, otherwise it would print NULL for this statement if (isSpark2) { setJobGroup = "dummy__ <- setJobGroup(\"" + jobGroup + "\", \"" + jobDesc + "\", TRUE)"; } else {
setJobGroup = "dummy__ <- setJobGroup(sc, \"" + jobGroup + "\", \"" + jobDesc + "\", TRUE)"; } lines = setJobGroup + "\n" + lines; if (sparkInterpreter.getSparkVersion().newerThanEquals(SparkVersion.SPARK_2_3_0)) { // setLocalProperty is only available from spark 2.3.0 String setPoolStmt = "setLocalProperty('spark.scheduler.pool', NULL)"; if (interpreterContext.getLocalProperties().containsKey("pool")) { setPoolStmt = "setLocalProperty('spark.scheduler.pool', '" + interpreterContext.getLocalProperties().get("pool") + "')"; } lines = setPoolStmt + "\n" + lines; } try { // render output with knitr if (rbackendDead.get()) { return new InterpreterResult(InterpreterResult.Code.ERROR, "sparkR backend is dead, please try to increase spark.r.backendConnectionTimeout"); } if (useKnitr()) { zeppelinR.setInterpreterOutput(null); zeppelinR.set(".zcmd", "\n```{r " + renderOptions + "}\n" + lines + "\n```"); zeppelinR.eval(".zres <- knit2html(text=.zcmd)"); String html = zeppelinR.getS0(".zres"); RDisplay rDisplay = render(html, imageWidth); return new InterpreterResult( rDisplay.code(), rDisplay.typ(), rDisplay.content() ); } else { // alternatively, stream the output (without knitr) zeppelinR.setInterpreterOutput(interpreterContext.out); zeppelinR.eval(lines); return new InterpreterResult(InterpreterResult.Code.SUCCESS, ""); } } catch (Exception e) { logger.error("Exception while connecting to R", e); return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage()); } } @Override public void close() throws InterpreterException { if (this.zeppelinR != null) { zeppelinR.close(); } if (this.sparkInterpreter != null) { this.sparkInterpreter.close(); this.sparkInterpreter = null; } } @Override public void cancel(InterpreterContext context) { if (this.sc != null) { sc.cancelJobGroup(Utils.buildJobGroupId(context)); } } @Override public FormType getFormType() { return FormType.NATIVE; } @Override public int getProgress(InterpreterContext context) throws InterpreterException { if (sparkInterpreter != null) { return sparkInterpreter.getProgress(context); } else { return 0; } } @Override public Scheduler getScheduler() { return SchedulerFactory.singleton().createOrGetFIFOScheduler( SparkRInterpreter.class.getName() + this.hashCode()); } @Override public BaseZeppelinContext getZeppelinContext() { return sparkInterpreter.getZeppelinContext(); } @Override public List<InterpreterCompletion> completion(String buf, int cursor, InterpreterContext interpreterContext) { return new ArrayList<>(); } private boolean useKnitr() { return Boolean.parseBoolean(getProperty("zeppelin.R.knitr", "true")); } public AtomicBoolean getRbackendDead() { return rbackendDead; } }
/* * Copyright 2015 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.util.concurrent; import io.netty.util.internal.ObjectUtil; import java.util.PriorityQueue; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Abstract base class for {@link EventExecutor}s that want to support scheduling. */ public abstract class AbstractScheduledEventExecutor extends AbstractEventExecutor { Queue<ScheduledFutureTask<?>> scheduledTaskQueue; protected static long nanoTime() { return ScheduledFutureTask.nanoTime(); } Queue<ScheduledFutureTask<?>> scheduledTaskQueue() { if (scheduledTaskQueue == null) { scheduledTaskQueue = new PriorityQueue<ScheduledFutureTask<?>>(); } return scheduledTaskQueue; } private static boolean isNullOrEmpty(Queue<ScheduledFutureTask<?>> queue) { return queue == null || queue.isEmpty(); } /** * Cancel all scheduled tasks. * * This method MUST be called only when {@link #inEventLoop()} is {@code true}. */ protected void cancelScheduledTasks() { assert inEventLoop(); Queue<ScheduledFutureTask<?>> scheduledTaskQueue = this.scheduledTaskQueue; if (isNullOrEmpty(scheduledTaskQueue)) { return; } final ScheduledFutureTask<?>[] scheduledTasks = scheduledTaskQueue.toArray(new ScheduledFutureTask<?>[scheduledTaskQueue.size()]); for (ScheduledFutureTask<?> task: scheduledTasks) { task.cancelWithoutRemove(false); } scheduledTaskQueue.clear(); } /** * @see #pollScheduledTask(long) */ protected final Runnable pollScheduledTask() { return pollScheduledTask(nanoTime()); } /** * Return the {@link Runnable} which is ready to be executed with the given {@code nanoTime}. * You should use {@link #nanoTime()} to retrieve the correct {@code nanoTime}. */ protected final Runnable pollScheduledTask(long nanoTime) { assert inEventLoop(); Queue<ScheduledFutureTask<?>> scheduledTaskQueue = this.scheduledTaskQueue; ScheduledFutureTask<?> scheduledTask = scheduledTaskQueue == null ? null : scheduledTaskQueue.peek(); if (scheduledTask == null) { return null; } if (scheduledTask.deadlineNanos() <= nanoTime) { scheduledTaskQueue.remove(); return scheduledTask; } return null; } /** * Return the nanoseconds until the next scheduled task is ready to be run or {@code -1} if no task is scheduled. */ protected final long nextScheduledTaskNano() { Queue<ScheduledFutureTask<?>> scheduledTaskQueue = this.scheduledTaskQueue; ScheduledFutureTask<?> scheduledTask = scheduledTaskQueue == null ? null : scheduledTaskQueue.peek(); if (scheduledTask == null) { return -1; } return Math.max(0, scheduledTask.deadlineNanos() - nanoTime()); } final ScheduledFutureTask<?> peekScheduledTask() { Queue<ScheduledFutureTask<?>> scheduledTaskQueue = this.scheduledTaskQueue; if (scheduledTaskQueue == null) { return null; } return scheduledTaskQueue.peek(); } /** * Returns {@code true} if a scheduled task is ready for processing.
*/ protected final boolean hasScheduledTasks() { Queue<ScheduledFutureTask<?>> scheduledTaskQueue = this.scheduledTaskQueue; ScheduledFutureTask<?> scheduledTask = scheduledTaskQueue == null ? null : scheduledTaskQueue.peek(); return scheduledTask != null && scheduledTask.deadlineNanos() <= nanoTime(); } @Override public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) { ObjectUtil.checkNotNull(command, "command"); ObjectUtil.checkNotNull(unit, "unit"); if (delay < 0) { delay = 0; } return schedule(new ScheduledFutureTask<Void>( this, command, null, ScheduledFutureTask.deadlineNanos(unit.toNanos(delay)))); } @Override public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) { ObjectUtil.checkNotNull(callable, "callable"); ObjectUtil.checkNotNull(unit, "unit"); if (delay < 0) { delay = 0; } return schedule(new ScheduledFutureTask<V>( this, callable, ScheduledFutureTask.deadlineNanos(unit.toNanos(delay)))); } @Override public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) { ObjectUtil.checkNotNull(command, "command"); ObjectUtil.checkNotNull(unit, "unit"); if (initialDelay < 0) { throw new IllegalArgumentException( String.format("initialDelay: %d (expected: >= 0)", initialDelay)); } if (period <= 0) { throw new IllegalArgumentException( String.format("period: %d (expected: > 0)", period)); } return schedule(new ScheduledFutureTask<Void>( this, Executors.<Void>callable(command, null), ScheduledFutureTask.deadlineNanos(unit.toNanos(initialDelay)), unit.toNanos(period))); } @Override public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) { ObjectUtil.checkNotNull(command, "command"); ObjectUtil.checkNotNull(unit, "unit"); if (initialDelay < 0) { throw new IllegalArgumentException( String.format("initialDelay: %d (expected: >= 0)", initialDelay)); } if (delay <= 0) { throw new IllegalArgumentException( String.format("delay: %d (expected: > 0)", delay)); } return schedule(new ScheduledFutureTask<Void>( this, Executors.<Void>callable(command, null), ScheduledFutureTask.deadlineNanos(unit.toNanos(initialDelay)), -unit.toNanos(delay))); } <V> ScheduledFuture<V> schedule(final ScheduledFutureTask<V> task) { if (inEventLoop()) { scheduledTaskQueue().add(task); } else { execute(new Runnable() { @Override public void run() { scheduledTaskQueue().add(task); } }); } return task; } final void removeScheduled(final ScheduledFutureTask<?> task) { if (inEventLoop()) { scheduledTaskQueue().remove(task); } else { execute(new Runnable() { @Override public void run() { removeScheduled(task); } }); } } }
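/*
 * Usage sketch: scheduling through a concrete subclass of the executor above
 * (GlobalEventExecutor.INSTANCE is assumed here as a plausible concrete instance).
 * Note the encoding used by this class: scheduleAtFixedRate() stores the period as
 * a positive nanosecond value, while scheduleWithFixedDelay() stores it negated,
 * which is how the task distinguishes the two re-scheduling strategies after a run.
 */
final class ScheduledExecutorUsageExample {
  static void demo() {
    EventExecutor executor = GlobalEventExecutor.INSTANCE;
    // One-shot task, roughly one second from now.
    executor.schedule(() -> System.out.println("tick"), 1, TimeUnit.SECONDS);
    // Fixed delay: the next run is scheduled 500ms after the previous run finishes.
    executor.scheduleWithFixedDelay(
        () -> System.out.println("poll"), 0, 500, TimeUnit.MILLISECONDS);
  }
}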
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.cache.store.jdbc; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.store.jdbc.model.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.testframework.junits.common.*; import org.apache.ignite.transactions.*; import org.jetbrains.annotations.*; import org.springframework.beans.*; import org.springframework.beans.factory.xml.*; import org.springframework.context.support.*; import org.springframework.core.io.*; import java.net.*; import java.sql.*; import java.util.*; import java.util.concurrent.*; import static org.apache.ignite.cache.CacheAtomicityMode.*; import static org.apache.ignite.cache.CacheMode.*; import static org.apache.ignite.testframework.GridTestUtils.*; /** * */ public abstract class CacheJdbcStoreAbstractMultithreadedSelfTest<T extends CacheAbstractJdbcStore> extends GridCommonAbstractTest { /** Default config with mapping. */ private static final String DFLT_MAPPING_CONFIG = "modules/core/src/test/config/store/jdbc/ignite-type-metadata.xml"; /** Database connection URL. */ protected static final String DFLT_CONN_URL = "jdbc:h2:mem:autoCacheStore;DB_CLOSE_DELAY=-1"; /** IP finder. */ protected static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** Number of transactions. */ private static final int TX_CNT = 200; /** Maximum batch size. */ private static final int BATCH_CNT = 2000; /** Cache store. */ protected static CacheAbstractJdbcStore store; /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { store = store(); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { store = null; } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { Connection conn = DriverManager.getConnection(DFLT_CONN_URL, "sa", ""); Statement stmt = conn.createStatement(); stmt.executeUpdate("DROP TABLE IF EXISTS Organization"); stmt.executeUpdate("DROP TABLE IF EXISTS Person"); stmt.executeUpdate("CREATE TABLE Organization (id integer PRIMARY KEY, name varchar(50), city varchar(50))"); stmt.executeUpdate("CREATE TABLE Person (id integer PRIMARY KEY, org_id integer, name varchar(50))"); conn.commit(); U.closeQuiet(stmt); U.closeQuiet(conn); startGrid(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { stopAllGrids(); } /** * @return New store. * @throws Exception In case of error.
*/ protected abstract T store() throws Exception; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration c = super.getConfiguration(gridName); TcpDiscoverySpi disco = new TcpDiscoverySpi(); disco.setIpFinder(IP_FINDER); c.setDiscoverySpi(disco); c.setCacheConfiguration(cacheConfiguration()); return c; } /** */ protected CacheConfiguration cacheConfiguration() throws Exception { CacheConfiguration cc = defaultCacheConfiguration(); cc.setCacheMode(PARTITIONED); cc.setAtomicityMode(ATOMIC); cc.setSwapEnabled(false); cc.setWriteBehindEnabled(false); URL cfgUrl; try { cfgUrl = new URL(DFLT_MAPPING_CONFIG); } catch (MalformedURLException ignore) { cfgUrl = U.resolveIgniteUrl(DFLT_MAPPING_CONFIG); } if (cfgUrl == null) throw new Exception("Failed to resolve metadata path: " + DFLT_MAPPING_CONFIG); try { GenericApplicationContext springCtx = new GenericApplicationContext(); new XmlBeanDefinitionReader(springCtx).loadBeanDefinitions(new UrlResource(cfgUrl)); springCtx.refresh(); Collection<CacheTypeMetadata> tp = new ArrayList<>(springCtx.getBeansOfType(CacheTypeMetadata.class).values()); cc.setTypeMetadata(tp); } catch (BeansException e) { if (X.hasCause(e, ClassNotFoundException.class)) throw new IgniteCheckedException("Failed to instantiate Spring XML application context " + "(make sure all classes used in Spring configuration are present at CLASSPATH) " + "[springUrl=" + cfgUrl + ']', e); else throw new IgniteCheckedException("Failed to instantiate Spring XML application context [springUrl=" + cfgUrl + ", err=" + e.getMessage() + ']', e); } cc.setCacheStoreFactory(singletonFactory(store)); cc.setReadThrough(true); cc.setWriteThrough(true); cc.setLoadPreviousValue(true); return cc; } /** * @throws Exception If failed. */ public void testMultithreadedPut() throws Exception { IgniteInternalFuture<?> fut1 = runMultiThreadedAsync(new Callable<Object>() { private final Random rnd = new Random(); @Nullable @Override public Object call() throws Exception { for (int i = 0; i < TX_CNT; i++) { IgniteCache<Object, Object> cache = jcache(); int id = rnd.nextInt(1000); if (rnd.nextBoolean()) cache.put(new OrganizationKey(id), new Organization(id, "Name" + id, "City" + id)); else cache.put(new PersonKey(id), new Person(id, rnd.nextInt(), "Name" + id, 1)); } return null; } }, 4, "put"); IgniteInternalFuture<?> fut2 = runMultiThreadedAsync(new Callable<Object>() { private final Random rnd = new Random(); @Nullable @Override public Object call() throws Exception { for (int i = 0; i < TX_CNT; i++) { IgniteCache<Object, Object> cache = jcache(); int id = rnd.nextInt(1000); if (rnd.nextBoolean()) cache.putIfAbsent(new OrganizationKey(id), new Organization(id, "Name" + id, "City" + id)); else cache.putIfAbsent(new PersonKey(id), new Person(id, rnd.nextInt(), "Name" + id, i)); } return null; } }, 8, "putIfAbsent"); fut1.get(); fut2.get(); } /** * @throws Exception If failed. 
*/ public void testMultithreadedPutAll() throws Exception { multithreaded(new Callable<Object>() { private final Random rnd = new Random(); @Nullable @Override public Object call() throws Exception { for (int i = 0; i < TX_CNT; i++) { int cnt = rnd.nextInt(BATCH_CNT); List<Integer> ids = new ArrayList<>(cnt); for (int j = 0; j < cnt; j++) { int id = rnd.nextInt(5000); if (!ids.contains(id)) ids.add(id); } Collections.sort(ids); Map<Object, Object> map = U.newLinkedHashMap(cnt); for (Integer id : ids) { if (rnd.nextBoolean()) map.put(new OrganizationKey(id), new Organization(id, "Name" + id, "City" + id)); else map.put(new PersonKey(id), new Person(id, rnd.nextInt(), "Name" + id, 1)); } IgniteCache<Object, Object> cache = jcache(); cache.putAll(map); } return null; } }, 8, "putAll"); } /** * @throws Exception If failed. */ public void testMultithreadedExplicitTx() throws Exception { runMultiThreaded(new Callable<Object>() { private final Random rnd = new Random(); @Nullable @Override public Object call() throws Exception { for (int i = 0; i < TX_CNT; i++) { IgniteCache<PersonKey, Person> cache = jcache(); try (Transaction tx = grid().transactions().txStart()) { cache.put(new PersonKey(1), new Person(1, rnd.nextInt(), "Name" + 1, 1)); cache.put(new PersonKey(2), new Person(2, rnd.nextInt(), "Name" + 2, 2)); cache.put(new PersonKey(3), new Person(3, rnd.nextInt(), "Name" + 3, 3)); cache.get(new PersonKey(1)); cache.get(new PersonKey(4)); Map<PersonKey, Person> map = U.newHashMap(2); map.put(new PersonKey(5), new Person(5, rnd.nextInt(), "Name" + 5, 5)); map.put(new PersonKey(6), new Person(6, rnd.nextInt(), "Name" + 6, 6)); cache.putAll(map); tx.commit(); } } return null; } }, 8, "tx"); } }
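/*
 * Sketch of a concrete subclass (hypothetical): the abstract test above only leaves
 * store() unimplemented. CacheJdbcPojoStore is assumed here as the store under test,
 * with an H2 pooled DataSource pointing at the same in-memory database the test
 * prepares in beforeTest().
 */
class CacheJdbcPojoStoreMultithreadedSelfTest
    extends CacheJdbcStoreAbstractMultithreadedSelfTest<CacheJdbcPojoStore> {
  /** {@inheritDoc} */
  @Override protected CacheJdbcPojoStore store() throws Exception {
    CacheJdbcPojoStore store = new CacheJdbcPojoStore();
    store.setDataSource(org.h2.jdbcx.JdbcConnectionPool.create(DFLT_CONN_URL, "sa", ""));
    return store;
  }
}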
package php.runtime.ext.core; import php.runtime.Memory; import php.runtime.annotation.Runtime; import php.runtime.common.GrammarUtils; import php.runtime.env.CallStackItem; import php.runtime.env.Context; import php.runtime.env.Environment; import php.runtime.env.TraceInfo; import php.runtime.env.handler.ErrorHandler; import php.runtime.env.handler.ExceptionHandler; import php.runtime.env.handler.ShutdownHandler; import php.runtime.env.message.SystemMessage; import php.runtime.exceptions.support.ErrorType; import php.runtime.ext.core.classes.stream.Stream; import php.runtime.ext.support.compile.FunctionsContainer; import php.runtime.invoke.Invoker; import php.runtime.lang.Closure; import php.runtime.lang.ForeachIterator; import php.runtime.lang.IObject; import php.runtime.lang.Resource; import php.runtime.memory.ArrayMemory; import php.runtime.memory.LongMemory; import php.runtime.memory.ObjectMemory; import php.runtime.memory.StringMemory; import php.runtime.reflection.*; import php.runtime.util.StackTracer; import java.io.InputStream; import static php.runtime.annotation.Reflection.Name; public class LangFunctions extends FunctionsContainer { public static void sleep(int sec) throws InterruptedException { Thread.sleep(sec * 1000); } public static Memory compact(@Runtime.GetLocals ArrayMemory locals, Memory varName, Memory... varNames) { ArrayMemory result = new ArrayMemory(); Memory value = locals.valueOfIndex(varName).toValue(); if (value != Memory.UNDEFINED) result.refOfIndex(varName).assign(value.toImmutable()); if (varNames != null) { for (Memory el : varNames) { value = locals.valueOfIndex(el).toValue(); if (value != Memory.UNDEFINED) result.refOfIndex(el).assign(value.toImmutable()); } } return result.toImmutable(); } public static void register_shutdown_function(Environment env, TraceInfo trace, @Runtime.Reference Memory handler, Memory... 
args) { Invoker invoker = expectingCallback(env, trace, 1, handler); if (invoker != null) env.registerShutdownFunction(new ShutdownHandler(invoker, args)); } public static int error_reporting(Environment env, int level) { int old = env.getErrorFlags(); env.setErrorFlags(level); return old; } public static int error_reporting(Environment env) { return env.getErrorFlags(); } public static Memory error_get_last(Environment env) { SystemMessage err = env.getLastMessage(); if (err == null) return Memory.NULL; ArrayMemory result = new ArrayMemory(); result.refOfIndex("type").assign(err.getType().value); result.refOfIndex("message").assign(err.getMessage()); if (err.getTrace() != null && err.getTrace().trace != null) { result.refOfIndex("file").assign(err.getTrace().trace.getFileName()); result.refOfIndex("line").assign(err.getTrace().trace.getStartLine() + 1); result.refOfIndex("position").assign(err.getTrace().trace.getStartPosition() + 1); } return result.toConstant(); } public static boolean trigger_error(Environment env, TraceInfo trace, String message, int type) { ErrorType _type = ErrorType.valueOf(type); if (_type == null) return false; env.error(trace, _type, message); return true; } public static boolean user_error(Environment env, TraceInfo trace, String message, int type) { return trigger_error(env, trace, message, type); } public static boolean trigger_error(Environment env, TraceInfo trace, String message) { return trigger_error(env, trace, message, ErrorType.E_USER_NOTICE.value); } public static boolean user_error(Environment env, TraceInfo trace, String message) { return trigger_error(env, trace, message); } public static Memory set_error_handler(Environment env, TraceInfo trace, @Runtime.Reference Memory handler, int flags) { Invoker invoker = expectingCallback(env, trace, 1, handler); if (invoker != null) { ErrorHandler last = env.getErrorHandler(); env.setErrorHandler(new ErrorHandler(invoker, handler.toImmutable(), flags)); return last == null ? Memory.NULL : last.invokerMemory; } else return Memory.FALSE; } public static Memory set_error_handler(Environment env, TraceInfo trace, @Runtime.Reference Memory handler) { return set_error_handler(env, trace, handler, ErrorType.E_ALL.value | ErrorType.E_STRICT.value); } public static boolean restore_error_handler(Environment env) { env.setErrorHandler(env.getPreviousErrorHandler()); return true; } public static Memory set_exception_handler(Environment env, TraceInfo trace, @Runtime.Reference Memory handler) { Invoker invoker = expectingCallback(env, trace, 1, handler); if (invoker != null) { ExceptionHandler eh = env.getExceptionHandler(); env.setExceptionHandler(new ExceptionHandler(invoker, handler.toImmutable())); return eh == null || eh.invoker == null ? 
Memory.NULL : eh.invokerMemory; } else return Memory.FALSE; } public static boolean restore_exception_handler(Environment env) { env.setExceptionHandler(env.getPreviousExceptionHandler()); return true; } public static Memory get_defined_vars(Environment env, TraceInfo trace, @Runtime.GetLocals ArrayMemory locals) { return locals.toImmutable(); } public static int extract(Environment env, TraceInfo trace, @Runtime.GetLocals ArrayMemory locals, @Runtime.Reference Memory array, int extractType) { return extract(env, trace, locals, array, extractType, Memory.NULL); } public static int extract(Environment env, TraceInfo trace, @Runtime.GetLocals ArrayMemory locals, @Runtime.Reference Memory array) { return extract(env, trace, locals, array, LangConstants.EXTR_OVERWRITE, Memory.NULL); } public static int extract(Environment env, TraceInfo trace, @Runtime.GetLocals ArrayMemory locals, @Runtime.Reference Memory array, int extractType, Memory _prefix) { if (expecting(env, trace, 1, array, Memory.Type.ARRAY)) { boolean isRefs = (extractType & LangConstants.EXTR_REFS) == LangConstants.EXTR_REFS; ForeachIterator iterator = array.getNewIterator(env, isRefs, false); int count = 0; if (extractType == LangConstants.EXTR_PREFIX_ALL || extractType == LangConstants.EXTR_PREFIX_IF_EXISTS || extractType == LangConstants.EXTR_PREFIX_INVALID || extractType == LangConstants.EXTR_PREFIX_SAME) if (_prefix.isNull()) { env.warning(trace, "extract(): specified extract type requires the prefix parameter"); return 0; } String prefix = _prefix.isNull() ? "" : _prefix.concat("_"); if (!prefix.isEmpty()) if (!GrammarUtils.isValidName(prefix)) { env.warning(trace, "extract(): prefix is not a valid identifier"); return 0; } while (iterator.next()) { Object key = iterator.getKey(); String keyS = key.toString(); String var; Memory value = iterator.getValue(); if (!isRefs) value = value.toImmutable(); switch (extractType) { case LangConstants.EXTR_OVERWRITE: if (GrammarUtils.isValidName(keyS)) { locals.refOfIndex(keyS).assign(value); count++; } break; case LangConstants.EXTR_SKIP: if (GrammarUtils.isValidName(keyS) && locals.valueOfIndex(keyS).isUndefined()) { locals.refOfIndex(keyS).assign(value); count++; } break; case LangConstants.EXTR_PREFIX_SAME: if (!locals.valueOfIndex(keyS).isUndefined()) { var = prefix.concat(keyS); if (GrammarUtils.isValidName(var)) { locals.refOfIndex(var).assign(value); count++; } } else if (GrammarUtils.isValidName(keyS)) { locals.refOfIndex(keyS).assign(value); count++; } break; case LangConstants.EXTR_PREFIX_ALL: var = prefix.concat(keyS); if (GrammarUtils.isValidName(var)) { locals.refOfIndex(prefix.concat(keyS)).assign(value); count++; } break; case LangConstants.EXTR_PREFIX_INVALID: if (GrammarUtils.isValidName(keyS)) { locals.refOfIndex(keyS).assign(value); count++; } else { var = prefix.concat(keyS); if (GrammarUtils.isValidName(var)) { locals.refOfIndex(var).assign(value); count++; } } break; case LangConstants.EXTR_IF_EXISTS: if (GrammarUtils.isValidName(keyS)) if (!locals.valueOfIndex(keyS).isUndefined()) { locals.refOfIndex(keyS).assign(value); count++; } break; case LangConstants.EXTR_PREFIX_IF_EXISTS: if (!locals.valueOfIndex(keyS).isUndefined()) { var = prefix.concat(keyS); if (GrammarUtils.isValidName(var)) { locals.refOfIndex(var).assign(value); count++; } } break; } } return count; } else return 0; } public static boolean defined(Environment env, String name) { Memory value = env.findConstant(name); return value != null; } public static boolean define(Environment env, TraceInfo 
trace, String name, Memory value, boolean caseInsensitive) { return env.defineConstant(name, value, !caseInsensitive); } public static boolean define(Environment env, TraceInfo trace, String name, Memory value) { return define(env, trace, name, value, false); } public static Memory constant(Environment env, TraceInfo trace, String name) { int pos; if ((pos = name.indexOf("::")) > -1) { String className = name.substring(0, pos); String constName = name.substring(pos + 2); ClassEntity entity = env.fetchClass(className, true); if (entity == null) return Memory.NULL; ConstantEntity constant = entity.findConstant(constName); return constant == null ? Memory.NULL : constant.getValue(); } else { Memory value = env.findConstant(name); if (value == null) return Memory.NULL; else return value; } } @Runtime.Immutable public static String gettype(Memory memory) { switch (memory.getRealType()) { case ARRAY: return "array"; case BOOL: return "boolean"; case INT: return "integer"; case DOUBLE: return "double"; case STRING: return "string"; case OBJECT: if (memory.isResource()) return "resource"; return "object"; case NULL: return "NULL"; default: return "unknown type"; } } public static boolean is_array(@Runtime.Reference Memory memory) { return memory.isArray(); } @Runtime.Immutable public static boolean is_bool(Memory memory) { return memory.toValue().type == Memory.Type.BOOL; } @Runtime.Immutable public static boolean is_double(Memory memory) { return memory.toValue().type == Memory.Type.DOUBLE; } @Runtime.Immutable public static boolean is_float(Memory memory) { return memory.toValue().type == Memory.Type.DOUBLE; } @Runtime.Immutable public static boolean is_int(Memory memory) { return memory.toValue().type == Memory.Type.INT; } @Runtime.Immutable public static boolean is_integer(Memory memory) { return memory.toValue().type == Memory.Type.INT; } @Runtime.Immutable public static boolean is_long(Memory memory) { return memory.toValue().type == Memory.Type.INT; } @Runtime.Immutable public static boolean is_null(Memory memory) { return memory.isNull(); } public static boolean is_object(@Runtime.Reference Memory memory) { return memory.isObject(); } @Runtime.Immutable public static boolean is_real(Memory memory) { return is_float(memory); } @Runtime.Immutable public static boolean is_string(Memory memory) { return memory.isString(); } @Runtime.Immutable public static boolean is_number(Memory memory) { return StringMemory.toNumeric(memory.toString(), true, null) != null; } @Runtime.Immutable public static boolean is_scalar(Memory memory) { switch (memory.getRealType()) { case BOOL: case INT: case DOUBLE: case STRING: return true; } /* NULL is deliberately excluded: is_scalar(null) is false in PHP */ return false; } public static boolean is_resource(@Runtime.Reference Memory memory) { return memory.isResource(); } public static Memory get_resource_type(@Runtime.Reference Memory memory) { if (memory.isObject()) { if (((ObjectMemory) memory).value instanceof Resource) return new StringMemory( ((Resource) ((ObjectMemory) memory).value).getResourceType() ); } return Memory.NULL; } public static boolean is_callable(Environment env, TraceInfo trace, @Runtime.Reference Memory memory) throws Throwable { // optimize if (memory.isObject() && memory.toValue(ObjectMemory.class).value instanceof Closure) return true; Invoker invoker = Invoker.valueOf(env, null, memory); return invoker != null && invoker.canAccess(env) == 0; } @Runtime.Immutable public static boolean boolval(Memory memory) { return memory.toBoolean(); } @Runtime.Immutable public static String strval(Memory
memory) { return memory.toString(); } @Runtime.Immutable public static long intval(Memory memory) { return memory.toLong(); } @Runtime.Immutable public static double floatval(Memory memory) { return memory.toDouble(); } @Runtime.Immutable public static double doubleval(Memory memory) { return memory.toDouble(); } public static boolean settype(@Runtime.Reference Memory memory, String type) { if (memory.isReference()) { if ("string".equals(type)) { memory.assign(memory.toString()); } else if ("bool".equals(type) || "boolean".equals(type)) { memory.assign(memory.toBoolean()); } else if ("int".equals(type) || "integer".equals(type)) { memory.assign(memory.toLong()); } else if ("float".equals(type) || "double".equals(type)) { memory.assign(memory.toDouble()); } else if ("null".equals(type)) { memory.assign(Memory.NULL); } else return false; return true; } return false; } public void debug_zval_dump(Environment env, TraceInfo trace) { env.warning(trace, "debug_zval_dump(): unsupported"); } public static Memory func_get_args(Environment env, TraceInfo trace) { if (env.getCallStackTop() == 0) { return Memory.FALSE; } return ArrayMemory.of(env.peekCall(0).args).toConstant(); } public static Memory func_num_args(Environment env, TraceInfo trace) { if (env.getCallStackTop() == 0) { return Memory.FALSE; } return LongMemory.valueOf(env.peekCall(0).args.length); } public static Memory func_get_arg(Environment env, TraceInfo trace, int argNum) { if (env.getCallStackTop() == 0) { return Memory.FALSE; } if (argNum < 0) return Memory.FALSE; Memory[] args = env.peekCall(0).args; if (argNum < args.length) return args[argNum]; else return Memory.FALSE; } private static Memory _call_user_func(Environment env, TraceInfo trace, Memory function, Memory... args) throws Throwable { Invoker invoker = expectingCallback(env, trace, 1, function); if (invoker == null) { return Memory.FALSE; } invoker.setTrace(trace); return invoker.call(args); } public static Memory call_user_func(Environment env, TraceInfo trace, Memory function, Memory... 
args) throws Throwable { Memory[] passed; if (args == null) { passed = new Memory[]{function}; } else { passed = new Memory[args.length + 1]; System.arraycopy(args, 0, passed, 1, args.length); passed[0] = function; } env.pushCall(trace, null, passed, "call_user_func", null, null); try { return _call_user_func(env, trace, function, args); } finally { env.popCall(); } } public static Memory call_user_func_array(Environment env, TraceInfo trace, Memory function, Memory args) throws Throwable { if (expecting(env, trace, 2, args, Memory.Type.ARRAY)) { Memory[] passed = new Memory[]{function, args}; env.pushCall(trace, null, passed, "call_user_func_array", null, null); try { return _call_user_func(env, trace, function, ((ArrayMemory) args).values(false)); } finally { env.popCall(); } } return Memory.FALSE; } public static Memory debug_backtrace(Environment env, TraceInfo trace, int options, int limit) { boolean provideObject = (options & LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT) == LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT; boolean ignoreArgs = (options & LangConstants.DEBUG_BACKTRACE_IGNORE_ARGS) == LangConstants.DEBUG_BACKTRACE_IGNORE_ARGS; ArrayMemory result = new ArrayMemory(); for (int i = 0; i < env.getCallStackTop(); i++) { if (limit != 0 && i >= limit) break; CallStackItem item = env.peekCall(i); ArrayMemory el = item.toArray(provideObject, ignoreArgs); result.add(el); } return result.toConstant(); } public static Memory debug_backtrace(Environment env, TraceInfo trace, int options) { return debug_backtrace(env, trace, options, 0); } public static Memory debug_backtrace(Environment env, TraceInfo trace) { return debug_backtrace(env, trace, LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT, 0); } public static void debug_print_backtrace(Environment env, TraceInfo trace, int options, int limit) { boolean provideObject = (options & LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT) == LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT; boolean ignoreArgs = (options & LangConstants.DEBUG_BACKTRACE_IGNORE_ARGS) == LangConstants.DEBUG_BACKTRACE_IGNORE_ARGS; StackTracer stackTracer = new StackTracer(env, limit); env.echo(stackTracer.toString(!ignoreArgs)); } public static void debug_print_backtrace(Environment env, TraceInfo trace, int options) { debug_print_backtrace(env, trace, options, 0); } public static void debug_print_backtrace(Environment env, TraceInfo trace) { debug_print_backtrace(env, trace, LangConstants.DEBUG_BACKTRACE_PROVIDE_OBJECT, 0); } public static boolean function_exists(Environment env, String name) { FunctionEntity function = env.fetchFunction(name); return function != null; } public static boolean class_exists(Environment env, String name, boolean autoload) { ClassEntity entity = env.fetchClass(name, autoload); return entity != null && entity.isClass(); } public static boolean class_exists(Environment env, String name) { return class_exists(env, name, true); } public static boolean interface_exists(Environment env, String name, boolean autoload) { ClassEntity entity = env.fetchClass(name, autoload); return entity != null && entity.isInterface(); } public static boolean interface_exists(Environment env, String name) { return interface_exists(env, name, true); } public static boolean trait_exists(Environment env, String name, boolean autoload) { ClassEntity entity = env.fetchClass(name, autoload); return entity != null && entity.isTrait(); } public static boolean trait_exists(Environment env, String name) { return trait_exists(env, name, true); } public static boolean 
method_exists(Environment env, Memory clazz, String method) { ClassEntity classEntity; if (clazz.isObject()) { ObjectMemory tmp = clazz.toValue(ObjectMemory.class); classEntity = tmp.getReflection(); } else { String name = clazz.toString(); String nameL = name.toLowerCase(); classEntity = env.fetchClass(name, nameL, true); if (classEntity == null) classEntity = env.fetchMagicClass(name, nameL); } return classEntity.findMethod(method.toLowerCase()) != null; } public static Memory property_exists(Environment env, Memory clazz, String property) throws Throwable { ClassEntity classEntity; IObject object = null; boolean isMagic = false; if (clazz.isObject()) { ObjectMemory tmp = clazz.toValue(ObjectMemory.class); classEntity = tmp.getReflection(); object = tmp.value; } else { String name = clazz.toString(); String nameL = name.toLowerCase(); classEntity = env.fetchClass(name, nameL, true); if (classEntity == null) { classEntity = env.fetchMagicClass(name, nameL); isMagic = true; } } if (classEntity == null) { return Memory.FALSE; } if (object != null) { ArrayMemory props = object.getProperties(); ClassEntity context = env.getLastClassOnStack(); PropertyEntity entity = classEntity.isInstanceOf(context) ? context.properties.get(property) : classEntity.properties.get(property); int accessFlags = entity == null ? 0 : entity.canAccess(env); if (accessFlags != 0) return Memory.FALSE; return (props != null && props.getByScalar(entity == null ? property : entity.getSpecificName()) != null) ? Memory.TRUE : Memory.FALSE; } else { PropertyEntity entity = classEntity.properties.get(property); if (isMagic) { int accessFlags = entity == null ? 0 : entity.canAccess(env); if (accessFlags != 0) return Memory.FALSE; } return entity != null ? Memory.TRUE : Memory.FALSE; } } public static Memory is_a(Environment env, TraceInfo trace, Memory object, String className, boolean allowedString) { ClassEntity classEntity = null; ClassEntity parentClass; if (allowedString && !object.isObject()) { String name = object.toString(); String nameL = name.toLowerCase(); classEntity = env.fetchClass(name, nameL, false); if (classEntity == null) classEntity = env.fetchMagicClass(name, nameL); } else if (expecting(env, trace, 1, object, Memory.Type.OBJECT)) { classEntity = object.toValue(ObjectMemory.class).getReflection(); } if (classEntity == null) return Memory.FALSE; parentClass = env.fetchClass(className, false); if (parentClass == null) return Memory.FALSE; return classEntity.isInstanceOf(parentClass) ? 
Memory.TRUE : Memory.FALSE; } public static Memory is_a(Environment env, TraceInfo trace, Memory object, String className) { return is_a(env, trace, object, className, false); } public static Memory is_subclass_of(Environment env, TraceInfo trace, Memory object, String className, boolean allowedString) { ClassEntity classEntity = null; ClassEntity parentClass; if (allowedString && !object.isObject()) { String name = object.toString(); String nameL = name.toLowerCase(); classEntity = env.fetchClass(name, nameL, true); if (classEntity == null) classEntity = env.fetchMagicClass(name, nameL); } else if (expecting(env, trace, 1, object, Memory.Type.OBJECT)) { classEntity = object.toValue(ObjectMemory.class).getReflection(); } parentClass = env.fetchClass(className, true); if (classEntity == null) { return Memory.NULL; } else if (parentClass == null) { return Memory.NULL; } else { if (object.isObject() && object.toValue(ObjectMemory.class).value instanceof Closure) return Memory.FALSE; return classEntity.isInstanceOf(parentClass) && !classEntity.equals(parentClass) ? Memory.TRUE : Memory.FALSE; } } public static Memory is_subclass_of(Environment env, TraceInfo trace, Memory object, String className) { return is_subclass_of(env, trace, object, className, true); } public static Memory get_class(Environment env, TraceInfo trace, Memory object) { if (object.isNull()) { if (object == Memory.UNDEFINED) { return Memory.FALSE; } CallStackItem item = env.peekCall(0); if (item.clazz != null) { if (item.classEntity == null) item.classEntity = env.fetchClass(item.clazz, false); if (item.classEntity == null) return Memory.FALSE; else { MethodEntity method = item.classEntity.findMethod(item.function); if (method == null) return Memory.FALSE; return new StringMemory(method.getClazz().getName()); } } } else if (expecting(env, trace, 1, object, Memory.Type.OBJECT)) { return new StringMemory(object.toValue(ObjectMemory.class).getReflection().getName()); } return Memory.FALSE; } public static Memory get_class(Environment env, TraceInfo trace) { return get_class(env, trace, Memory.NULL); } public static Memory get_called_class(Environment env) { String name = env.getLateStatic(); return name == null || name.isEmpty() ? 
Memory.FALSE : new StringMemory(name); } public static Memory get_class_methods(Environment env, TraceInfo trace, Memory value) { ClassEntity entity; if (value.isString()) { entity = env.fetchClass(value.toString(), true); } else if (value.isObject()) { entity = value.toValue(ObjectMemory.class).getReflection(); } else { env.warning( trace, "get_class_methods(): Argument 1 must be string or object, %s given", value.getRealType().toString() ); return Memory.NULL; } if (entity == null) return Memory.NULL; ClassEntity context = env.getLastClassOnStack(); ArrayMemory result = new ArrayMemory(); for (MethodEntity el : entity.getMethods().values()) { if (el.canAccess(env, context) == 0) result.refOfPush().assign(el.getName()); } return result.toConstant(); } public static Memory get_class_vars(Environment env, TraceInfo trace, Memory value) { ClassEntity entity; if (value.isString()) { entity = env.fetchClass(value.toString(), true); } else if (value.isObject()) { entity = value.toValue(ObjectMemory.class).getReflection(); } else { env.warning( trace, "get_class_vars(): Argument 1 must be string or object, %s given", value.getRealType().toString() ); return Memory.NULL; } if (entity == null) return Memory.NULL; ClassEntity context = env.getLastClassOnStack(); ArrayMemory result = new ArrayMemory(); for (PropertyEntity el : entity.getProperties()) { if (el.canAccess(env, context) == 0) result.refOfIndex(el.getName()).assign(el.getDefaultValue(env)); } for (PropertyEntity el : entity.getStaticProperties()) { if (el.canAccess(env, context) == 0) result.refOfIndex(el.getName()).assign(el.getDefaultValue(env)); } return result.toConstant(); } public static Memory get_object_vars(Environment env, TraceInfo trace, Memory object) { if (expecting(env, trace, 1, object, Memory.Type.OBJECT)) { ObjectMemory o = object.toValue(ObjectMemory.class); ArrayMemory props = o.value.getProperties(); ClassEntity entity = o.getReflection(); ClassEntity context = env.getLastClassOnStack(); ForeachIterator iterator = props.foreachIterator(false, false); ArrayMemory result = new ArrayMemory(); while (iterator.next()) { PropertyEntity prop = entity.findProperty(iterator.getKey().toString()); if (prop == null || prop.canAccess(env, context) == 0) result.refOfIndex(prop == null ? iterator.getKey().toString() : prop.getName()) .assign(iterator.getValue()); } return result.toConstant(); } else return Memory.NULL; } public static Memory get_parent_class(Memory object) { if (object.isObject()) { ClassEntity classEntity = object.toValue(ObjectMemory.class).getReflection().getParent(); if (classEntity == null) return Memory.FALSE; else return new StringMemory(classEntity.getName()); } else { return Memory.FALSE; } } public static Memory get_parent_class(Environment env) { CallStackItem item = env.peekCall(0); if (item.clazz != null) { if (item.classEntity == null) item.classEntity = env.fetchClass(item.clazz, false); if (item.classEntity == null) return Memory.FALSE; else { MethodEntity method = item.classEntity.findMethod(item.function); if (method == null) return Memory.FALSE; ClassEntity parent = method.getClazz().getParent(); return parent == null ? Memory.FALSE : new StringMemory(parent.getName()); } } return Memory.FALSE; } }
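/*
 * Behaviour sketch (illustrative): the functions above operate on jphp Memory values.
 * This hypothetical snippet shows gettype() mapping runtime types to PHP type names,
 * using the value factories (LongMemory.valueOf, new StringMemory) that the code
 * above already relies on.
 */
class LangFunctionsExample {
  static void demo() {
    System.out.println(LangFunctions.gettype(LongMemory.valueOf(42)));   // "integer"
    System.out.println(LangFunctions.gettype(new StringMemory("42")));   // "string"
    System.out.println(LangFunctions.gettype(Memory.NULL));              // "NULL"
    System.out.println(LangFunctions.is_scalar(new StringMemory("42"))); // true
  }
}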
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.service; import java.io.IOException; import java.io.Serializable; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeoutException; import javax.annotation.Nullable; import javax.management.NotificationEmitter; import javax.management.openmbean.CompositeData; import javax.management.openmbean.OpenDataException; import javax.management.openmbean.TabularData; import org.apache.cassandra.db.ColumnFamilyStoreMBean; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.utils.Pair; public interface StorageServiceMBean extends NotificationEmitter { /** * Retrieve the list of live nodes in the cluster, where "liveness" is * determined by the failure detector of the node being queried. * * @return set of IP addresses, as Strings */ @Deprecated public List<String> getLiveNodes(); public List<String> getLiveNodesWithPort(); /** * Retrieve the list of unreachable nodes in the cluster, as determined * by this node's failure detector. * * @return set of IP addresses, as Strings */ @Deprecated public List<String> getUnreachableNodes(); public List<String> getUnreachableNodesWithPort(); /** * Retrieve the list of nodes currently bootstrapping into the ring. * * @return set of IP addresses, as Strings */ @Deprecated public List<String> getJoiningNodes(); public List<String> getJoiningNodesWithPort(); /** * Retrieve the list of nodes currently leaving the ring. * * @return set of IP addresses, as Strings */ @Deprecated public List<String> getLeavingNodes(); public List<String> getLeavingNodesWithPort(); /** * Retrieve the list of nodes currently moving in the ring. * * @return set of IP addresses, as Strings */ @Deprecated public List<String> getMovingNodes(); public List<String> getMovingNodesWithPort(); /** * Fetch string representations of the tokens for this node. * * @return a collection of tokens formatted as strings */ public List<String> getTokens(); /** * Fetch string representations of the tokens for a specified node. * * @param endpoint string representation of a node * @return a collection of tokens formatted as strings */ public List<String> getTokens(String endpoint) throws UnknownHostException; /** * Fetch a string representation of the Cassandra version. * @return A string representation of the Cassandra version. */ public String getReleaseVersion(); /** * Fetch a string representation of the current Schema version. * @return A string representation of the Schema version. */ public String getSchemaVersion(); /** * Fetch the replication factor for a given keyspace.
* @return A string representation of the replication factor for the given keyspace. */ public String getKeyspaceReplicationInfo(String keyspaceName); /** * Get the list of all data file locations from conf * @return String array of all locations */ public String[] getAllDataFileLocations(); /** * Get location of the commit log * @return a string path */ public String getCommitLogLocation(); /** * Get location of the saved caches dir * @return a string path */ public String getSavedCachesLocation(); /** * Retrieve a map of range to end points that describe the ring topology * of a Cassandra cluster. * * @return mapping of ranges to end points */ @Deprecated public Map<List<String>, List<String>> getRangeToEndpointMap(String keyspace); public Map<List<String>, List<String>> getRangeToEndpointWithPortMap(String keyspace); /** * Retrieve a map of range to rpc addresses that describe the ring topology * of a Cassandra cluster. * * @return mapping of ranges to rpc addresses */ @Deprecated public Map<List<String>, List<String>> getRangeToRpcaddressMap(String keyspace); public Map<List<String>, List<String>> getRangeToNativeaddressWithPortMap(String keyspace); /** * The same as {@code describeRing(String)} but converts TokenRange to the String for JMX compatibility * * @param keyspace The keyspace to fetch information about * * @return a List of TokenRange(s) converted to String for the given keyspace */ @Deprecated public List<String> describeRingJMX(String keyspace) throws IOException; public List<String> describeRingWithPortJMX(String keyspace) throws IOException; /** * Retrieve a map of pending ranges to endpoints that describe the ring topology * @param keyspace the keyspace to get the pending range map for. * @return a map of pending ranges to endpoints */ @Deprecated public Map<List<String>, List<String>> getPendingRangeToEndpointMap(String keyspace); public Map<List<String>, List<String>> getPendingRangeToEndpointWithPortMap(String keyspace); /** * Retrieve a map of tokens to endpoints, including the bootstrapping * ones. * * @return a map of tokens to endpoints in ascending order */ @Deprecated public Map<String, String> getTokenToEndpointMap(); public Map<String, String> getTokenToEndpointWithPortMap(); /** Retrieve this host's unique ID */ public String getLocalHostId(); /** {@link StorageServiceMBean#getEndpointToHostId} */ @Deprecated public Map<String, String> getHostIdMap(); /** Retrieve the mapping of endpoint to host ID */ @Deprecated public Map<String, String> getEndpointToHostId(); public Map<String, String> getEndpointWithPortToHostId(); /** Retrieve the mapping of host ID to endpoint */ @Deprecated public Map<String, String> getHostIdToEndpoint(); public Map<String, String> getHostIdToEndpointWithPort(); /** Human-readable load value */ public String getLoadString(); /** Human-readable load value. Keys are IP addresses. */ @Deprecated public Map<String, String> getLoadMap(); public Map<String, String> getLoadMapWithPort(); /** * Return the generation value for this node. * * @return generation number */ public int getCurrentGenerationNumber(); /** * This method returns the N endpoints that are responsible for storing the * specified key, i.e. for replication.
* * @param keyspaceName keyspace name * @param cf Column family name * @param key key for which we need to find the endpoints * @return the endpoints responsible for this key */ @Deprecated public List<InetAddress> getNaturalEndpoints(String keyspaceName, String cf, String key); public List<String> getNaturalEndpointsWithPort(String keyspaceName, String cf, String key); @Deprecated public List<InetAddress> getNaturalEndpoints(String keyspaceName, ByteBuffer key); public List<String> getNaturalEndpointsWithPort(String keyspaceName, ByteBuffer key); /** * @deprecated use {@link #takeSnapshot(String tag, Map options, String... entities)} instead. */ @Deprecated public void takeSnapshot(String tag, String... keyspaceNames) throws IOException; /** * @deprecated use {@link #takeSnapshot(String tag, Map options, String... entities)} instead. */ @Deprecated public void takeTableSnapshot(String keyspaceName, String tableName, String tag) throws IOException; /** * @deprecated use {@link #takeSnapshot(String tag, Map options, String... entities)} instead. */ @Deprecated public void takeMultipleTableSnapshot(String tag, String... tableList) throws IOException; /** * Takes a snapshot of multiple column families from different keyspaces. A snapshot name must be specified. * * @param tag * the tag given to the snapshot; may not be null or empty * @param options * Map of options (skipFlush is the only supported option for now) * @param entities * list of keyspaces / tables in the form of empty | ks1 ks2 ... | ks1.cf1,ks2.cf2,... */ public void takeSnapshot(String tag, Map<String, String> options, String... entities) throws IOException; /** * Remove the snapshot with the given name from the given keyspaces. * If no tag is specified we will remove all snapshots. */ public void clearSnapshot(String tag, String... keyspaceNames) throws IOException; /** * Get the details of all the snapshots * @return A map of snapshotName to all its details in Tabular form. */ public Map<String, TabularData> getSnapshotDetails(); /** * Get the true size taken by all snapshots across all keyspaces. * @return True size taken by all the snapshots. */ public long trueSnapshotsSize(); /** * Forces refresh of values stored in system.size_estimates of all column families. */ public void refreshSizeEstimates() throws ExecutionException; /** * Removes extraneous entries in system.size_estimates. */ public void cleanupSizeEstimates(); /** * Forces major compaction of a single keyspace */ public void forceKeyspaceCompaction(boolean splitOutput, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; @Deprecated public int relocateSSTables(String keyspace, String ... cfnames) throws IOException, ExecutionException, InterruptedException; public int relocateSSTables(int jobs, String keyspace, String ... cfnames) throws IOException, ExecutionException, InterruptedException; /** * Forces major compaction of specified token range in a single keyspace */ public void forceKeyspaceCompactionForTokenRange(String keyspaceName, String startToken, String endToken, String... tableNames) throws IOException, ExecutionException, InterruptedException; /** * Trigger a cleanup of keys on a single keyspace */ @Deprecated public int forceKeyspaceCleanup(String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException; public int forceKeyspaceCleanup(int jobs, String keyspaceName, String...
tables) throws IOException, ExecutionException, InterruptedException; /** * Scrub (deserialize + reserialize at the latest version, skipping bad rows if any) the given keyspace. * If tableNames array is empty, all CFs are scrubbed. * * Scrubbed CFs will be snapshotted first, if disableSnapshot is false */ @Deprecated public int scrub(boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; @Deprecated public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; @Deprecated public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, int jobs, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException; public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, boolean reinsertOverflowedTTL, int jobs, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException; /** * Verify (checksums of) the given keyspace. * If tableNames array is empty, all CFs are verified. * * The entire sstable will be read to ensure each cell validates if extendedVerify is true */ public int verify(boolean extendedVerify, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; public int verify(boolean extendedVerify, boolean checkVersion, boolean diskFailurePolicy, boolean mutateRepairStatus, boolean checkOwnsTokens, boolean quick, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; /** * Rewrite all sstables to the latest version. * Unlike scrub, it does not skip bad rows and does not snapshot sstables first. */ @Deprecated public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, String... tableNames) throws IOException, ExecutionException, InterruptedException; public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, int jobs, String... tableNames) throws IOException, ExecutionException, InterruptedException; /** * Rewrites all sstables from the given tables to remove deleted data. * The tombstone option defines the granularity of the procedure: ROW removes deleted partitions and rows, CELL also removes overwritten or deleted cells. */ public int garbageCollect(String tombstoneOption, int jobs, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; /** * Flush all memtables for the given column families, or all column families for the given keyspace * if none are explicitly listed. * @param keyspaceName * @param tableNames * @throws IOException */ public void forceKeyspaceFlush(String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException; /** * Invoke repair asynchronously. * You can track repair progress by subscribing to the JMX notifications sent from this StorageServiceMBean. * Notification format is: * type: "repair" * userObject: int array of length 2, [0]=command number, [1]=ordinal of ActiveRepairService.Status * * @param keyspace Keyspace name to repair. Should not be null. * @param options repair option.
* @return Repair command number, or 0 if nothing to repair */ public int repairAsync(String keyspace, Map<String, String> options); public void forceTerminateAllRepairSessions(); public void setRepairSessionMaxTreeDepth(int depth); public int getRepairSessionMaxTreeDepth(); /** * Get the status of a given parent repair session. * @param cmd the int reference returned when issuing the repair * @return status of parent repair from enum {@link org.apache.cassandra.repair.RepairRunnable.Status} * followed by final message or messages of the session */ @Nullable public List<String> getParentRepairStatus(int cmd); /** * Transfer this node's data to other machines and remove it from service. * @param force Decommission even if this will reduce N to be less than RF. */ public void decommission(boolean force) throws InterruptedException; /** * @param newToken token to move this node to. * This node will unload its data onto its neighbors, and bootstrap to the new token. */ public void move(String newToken) throws IOException; /** * removeNode removes the given token (and all data associated with * the endpoint that had it) from the ring */ public void removeNode(String token); /** * Get the status of a token removal. */ @Deprecated public String getRemovalStatus(); public String getRemovalStatusWithPort(); /** * Force a remove operation to finish. */ public void forceRemoveCompletion(); /** * set the logging level at runtime<br> * <br> * If both classQualifier and level are empty/null, it will reload the configuration to reset.<br> * If classQualifier is not empty but level is empty/null, it will set the level to null for the defined classQualifier<br> * If level cannot be parsed, then the level will be defaulted to DEBUG<br> * <br> * The logback configuration should have {@code < jmxConfigurator />} set * * @param classQualifier The logger's classQualifier * @param level The log level * @throws Exception * * @see ch.qos.logback.classic.Level#toLevel(String) */ public void setLoggingLevel(String classQualifier, String level) throws Exception; /** get the runtime logging levels */ public Map<String,String> getLoggingLevels(); /** get the operational mode (leaving, joining, normal, decommissioned, client) **/ public String getOperationMode(); /** Returns whether the storage service is starting or not */ public boolean isStarting(); /** get the progress of a drain operation */ public String getDrainProgress(); /** makes node unavailable for writes, flushes memtables and replays commitlog. */ public void drain() throws IOException, InterruptedException, ExecutionException; /** * Truncates (deletes) the given table from the provided keyspace. * Calling truncate results in actual deletion of all data in the cluster * under the given table and it will fail unless all hosts are up. * All data in the given column family will be deleted, but its definition * will not be affected. * * @param keyspace The keyspace to delete from * @param table The column family to delete data from. */ public void truncate(String keyspace, String table) throws TimeoutException, IOException; /** * given a list of tokens (representing the nodes in the cluster), returns * a mapping from {@code "token -> %age of cluster owned by that token"} */ @Deprecated public Map<InetAddress, Float> getOwnership(); public Map<String, Float> getOwnershipWithPort(); /** * Effective ownership is the percentage of the data each node owns for the given keyspace; * we calculate the percentage using the replication factor.
* If keyspace is null, this method will try to verify that all the keyspaces * in the cluster have the same replication strategy; if so, we will * use the first one, otherwise an empty Map is returned. */ @Deprecated public Map<InetAddress, Float> effectiveOwnership(String keyspace) throws IllegalStateException; public Map<String, Float> effectiveOwnershipWithPort(String keyspace) throws IllegalStateException; public List<String> getKeyspaces(); public List<String> getNonSystemKeyspaces(); public List<String> getNonLocalStrategyKeyspaces(); @Deprecated public Map<String, String> getViewBuildStatuses(String keyspace, String view); public Map<String, String> getViewBuildStatusesWithPort(String keyspace, String view); /** * Change the endpoint snitch class and dynamic-ness (and dynamic attributes) at runtime. * * This method is used to change the snitch implementation and/or dynamic snitch parameters. * If {@code epSnitchClassName} is specified, it will configure a new snitch instance and make it a * 'dynamic snitch' if {@code dynamic} is specified and {@code true}. * * The parameters {@code dynamicUpdateInterval}, {@code dynamicResetInterval} and {@code dynamicBadnessThreshold} * can be specified individually to update the parameters of the dynamic snitch during runtime. * * @param epSnitchClassName the canonical path name for a class implementing IEndpointSnitch * @param dynamic boolean that decides whether the dynamic snitch is used or not - only valid, if {@code epSnitchClassName} is specified * @param dynamicUpdateInterval integer, in ms (defaults to the value configured in cassandra.yaml, which defaults to 100) * @param dynamicResetInterval integer, in ms (defaults to the value configured in cassandra.yaml, which defaults to 600,000) * @param dynamicBadnessThreshold double, (defaults to the value configured in cassandra.yaml, which defaults to 0.0) */ public void updateSnitch(String epSnitchClassName, Boolean dynamic, Integer dynamicUpdateInterval, Integer dynamicResetInterval, Double dynamicBadnessThreshold) throws ClassNotFoundException; /* Update dynamic_snitch_update_interval_in_ms */ public void setDynamicUpdateInterval(int dynamicUpdateInterval); /* Get dynamic_snitch_update_interval_in_ms */ public int getDynamicUpdateInterval(); // allows a user to forcibly 'kill' a sick node public void stopGossiping(); // allows a user to recover a forcibly 'killed' node public void startGossiping(); // allows a user to see whether gossip is running or not public boolean isGossipRunning(); // allows a user to forcibly completely stop cassandra public void stopDaemon(); // to determine if initialization has completed public boolean isInitialized(); public void stopNativeTransport(); public void startNativeTransport(); public boolean isNativeTransportRunning(); public void enableNativeTransportOldProtocolVersions(); public void disableNativeTransportOldProtocolVersions(); // sets limits on the number of concurrent requests in flight, in bytes public long getNativeTransportMaxConcurrentRequestsInBytes(); public void setNativeTransportMaxConcurrentRequestsInBytes(long newLimit); public long getNativeTransportMaxConcurrentRequestsInBytesPerIp(); public void setNativeTransportMaxConcurrentRequestsInBytesPerIp(long newLimit); // allows a node that has been started without joining the ring to join it public void joinRing() throws IOException; public boolean isJoined(); public boolean isDrained(); public boolean isDraining(); /** Check if currently bootstrapping.
* Note this becomes false before {@link org.apache.cassandra.db.SystemKeyspace#bootstrapComplete()} is called, * as setting bootstrap to complete is called only when the node joins the ring. * @return True prior to bootstrap streaming completing. False prior to start of bootstrap and post streaming. */ public boolean isBootstrapMode(); public void setRpcTimeout(long value); public long getRpcTimeout(); public void setReadRpcTimeout(long value); public long getReadRpcTimeout(); public void setRangeRpcTimeout(long value); public long getRangeRpcTimeout(); public void setWriteRpcTimeout(long value); public long getWriteRpcTimeout(); public void setInternodeTcpConnectTimeoutInMS(int value); public int getInternodeTcpConnectTimeoutInMS(); public void setInternodeTcpUserTimeoutInMS(int value); public int getInternodeTcpUserTimeoutInMS(); public void setCounterWriteRpcTimeout(long value); public long getCounterWriteRpcTimeout(); public void setCasContentionTimeout(long value); public long getCasContentionTimeout(); public void setTruncateRpcTimeout(long value); public long getTruncateRpcTimeout(); public void setStreamThroughputMbPerSec(int value); public int getStreamThroughputMbPerSec(); public void setInterDCStreamThroughputMbPerSec(int value); public int getInterDCStreamThroughputMbPerSec(); public int getCompactionThroughputMbPerSec(); public void setCompactionThroughputMbPerSec(int value); public int getBatchlogReplayThrottleInKB(); public void setBatchlogReplayThrottleInKB(int value); public int getConcurrentCompactors(); public void setConcurrentCompactors(int value); public void bypassConcurrentValidatorsLimit(); public void enforceConcurrentValidatorsLimit(); public boolean isConcurrentValidatorsLimitEnforced(); public int getConcurrentValidators(); public void setConcurrentValidators(int value); public int getSSTablePreemptiveOpenIntervalInMB(); public void setSSTablePreemptiveOpenIntervalInMB(int intervalInMB); public boolean getMigrateKeycacheOnCompaction(); public void setMigrateKeycacheOnCompaction(boolean invalidateKeyCacheOnCompaction); public int getConcurrentViewBuilders(); public void setConcurrentViewBuilders(int value); public boolean isIncrementalBackupsEnabled(); public void setIncrementalBackupsEnabled(boolean value); /** * Initiate a process of streaming data for which we are responsible from other nodes. It is similar to bootstrap * except meant to be used on a node which is already in the cluster (typically containing no data) as an * alternative to running repair. * * @param sourceDc Name of DC from which to select sources for streaming or null to pick any node */ public void rebuild(String sourceDc); /** * Same as {@link #rebuild(String)}, but only for specified keyspace and ranges. * * @param sourceDc Name of DC from which to select sources for streaming or null to pick any node * @param keyspace Name of the keyspace which to rebuild or null to rebuild all keyspaces. * @param tokens Range of tokens to rebuild or null to rebuild all token ranges. In the format of: * "(start_token_1,end_token_1],(start_token_2,end_token_2],...(start_token_n,end_token_n]" */ public void rebuild(String sourceDc, String keyspace, String tokens, String specificSources); /** Starts a bulk load and blocks until it completes. */ public void bulkLoad(String directory); /** * Starts a bulk load asynchronously and returns the String representation of the planID for the new * streaming session. 
*/ public String bulkLoadAsync(String directory); public void rescheduleFailedDeletions(); /** * Load new SSTables to the given keyspace/table * * @param ksName The parent keyspace name * @param tableName The ColumnFamily name where SSTables belong * * @see ColumnFamilyStoreMBean#loadNewSSTables() */ @Deprecated public void loadNewSSTables(String ksName, String tableName); /** * Return a List of Tokens representing a sample of keys across all ColumnFamilyStores. * * Note: this should be left as an operation, not an attribute (methods starting with "get") * to avoid sending potentially multiple MB of data when accessing this mbean by default. See CASSANDRA-4452. * * @return set of Tokens as Strings */ public List<String> sampleKeyRange(); /** * Rebuild the specified indexes */ public void rebuildSecondaryIndex(String ksName, String cfName, String... idxNames); public void resetLocalSchema() throws IOException; public void reloadLocalSchema(); /** * Enables/Disables tracing for the whole system. * * @param probability * ]0,1[ will enable tracing on a partial number of requests with the provided probability. 0 will * disable tracing and 1 will enable tracing for all requests (which might severely cripple the system) */ public void setTraceProbability(double probability); public Map<String, List<CompositeData>> samplePartitions(int duration, int capacity, int count, List<String> samplers) throws OpenDataException; /** * Returns the configured tracing probability. */ public double getTraceProbability(); void disableAutoCompaction(String ks, String ... tables) throws IOException; void enableAutoCompaction(String ks, String ... tables) throws IOException; Map<String, Boolean> getAutoCompactionStatus(String ks, String... tables) throws IOException; public void deliverHints(String host) throws UnknownHostException; /** Returns the name of the cluster */ public String getClusterName(); /** Returns the cluster partitioner */ public String getPartitionerName(); /** Returns the threshold for warning of queries with many tombstones */ public int getTombstoneWarnThreshold(); /** Sets the threshold for warning queries with many tombstones */ public void setTombstoneWarnThreshold(int tombstoneDebugThreshold); /** Returns the threshold for abandoning queries with many tombstones */ public int getTombstoneFailureThreshold(); /** Sets the threshold for abandoning queries with many tombstones */ public void setTombstoneFailureThreshold(int tombstoneDebugThreshold); /** Returns the threshold for skipping the column index when caching partition info **/ public int getColumnIndexCacheSize(); /** Sets the threshold for skipping the column index when caching partition info **/ public void setColumnIndexCacheSize(int cacheSizeInKB); /** Returns the threshold for rejecting queries due to a large batch size */ public int getBatchSizeFailureThreshold(); /** Sets the threshold for rejecting queries due to a large batch size */ public void setBatchSizeFailureThreshold(int batchSizeDebugThreshold); /** Returns the threshold for warning queries due to a large batch size */ public int getBatchSizeWarnThreshold(); /** Sets the threshold for warning queries due to a large batch size */ public void setBatchSizeWarnThreshold(int batchSizeDebugThreshold); /** Sets the hinted handoff throttle in kb per second, per delivery thread. */ public void setHintedHandoffThrottleInKB(int throttleInKB); /** * Resume bootstrap streaming when there is failed data streaming. * * * @return true if the node successfully starts resuming.
(this does not mean bootstrap streaming was successful.) */ public boolean resumeBootstrap(); /** Gets the concurrency settings for processing stages */ static class StageConcurrency implements Serializable { public final int corePoolSize; public final int maximumPoolSize; public StageConcurrency(int corePoolSize, int maximumPoolSize) { this.corePoolSize = corePoolSize; this.maximumPoolSize = maximumPoolSize; } } public Map<String, List<Integer>> getConcurrency(List<String> stageNames); /** Sets the concurrency setting for processing stages */ public void setConcurrency(String threadPoolName, int newCorePoolSize, int newMaximumPoolSize); /** Clears the history of clients that have connected in the past **/ void clearConnectionHistory(); public void disableAuditLog(); public void enableAuditLog(String loggerName, Map<String, String> parameters, String includedKeyspaces, String excludedKeyspaces, String includedCategories, String excludedCategories, String includedUsers, String excludedUsers) throws ConfigurationException; public void enableAuditLog(String loggerName, String includedKeyspaces, String excludedKeyspaces, String includedCategories, String excludedCategories, String includedUsers, String excludedUsers) throws ConfigurationException; public boolean isAuditLogEnabled(); public String getCorruptedTombstoneStrategy(); public void setCorruptedTombstoneStrategy(String strategy); /** * Start the full query logger. * @param path Path where the full query log will be stored. If null cassandra.yaml value is used. * @param rollCycle How often to create a new file for query data (MINUTELY, DAILY, HOURLY) * @param blocking Whether threads submitting queries to the query log should block if they can't be drained to the filesystem, or alternatively drop samples and log * @param maxQueueWeight How many bytes of query data to queue before blocking or dropping samples * @param maxLogSize How many bytes of log data to store before dropping segments. Might not be respected if a log file hasn't rolled so it can be deleted. * @param archiveCommand executable archiving the rolled log files, * @param maxArchiveRetries max number of times to retry a failing archive command * */ public void enableFullQueryLogger(String path, String rollCycle, Boolean blocking, int maxQueueWeight, long maxLogSize, @Nullable String archiveCommand, int maxArchiveRetries); /** * Disable the full query logger if it is enabled. * Also delete any generated files in the last used full query log path as well as the one configured in cassandra.yaml */ public void resetFullQueryLogger(); /** * Stop logging queries but leave any generated files on disk. */ public void stopFullQueryLogger(); /** Sets the initial allocation size of backing arrays for new RangeTombstoneList objects */ public void setInitialRangeTombstoneListAllocationSize(int size); /** Returns the initial allocation size of backing arrays for new RangeTombstoneList objects */ public int getInitialRangeTombstoneListAllocationSize(); /** Sets the resize factor to use when growing/resizing a RangeTombstoneList */ public void setRangeTombstoneListResizeGrowthFactor(double growthFactor); /** Returns the resize factor to use when growing/resizing a RangeTombstoneList */ public double getRangeTombstoneResizeListGrowthFactor(); }
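// A minimal client-side sketch (illustrative, not part of this interface) showing how
// the operations above are typically driven over JMX. The host, tag, and table names
// below are assumptions; 7199 is Cassandra's default JMX port, and
// "org.apache.cassandra:type=StorageService" is the ObjectName this MBean is
// registered under.
//
// import java.util.Collections;
// import javax.management.JMX;
// import javax.management.MBeanServerConnection;
// import javax.management.ObjectName;
// import javax.management.remote.JMXConnector;
// import javax.management.remote.JMXConnectorFactory;
// import javax.management.remote.JMXServiceURL;
//
// class StorageServiceClientSketch {
//     public static void main(String[] args) throws Exception {
//         JMXServiceURL url = new JMXServiceURL(
//                 "service:jmx:rmi:///jndi/rmi://localhost:7199/jmxrmi");
//         try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
//             MBeanServerConnection mbs = connector.getMBeanServerConnection();
//             StorageServiceMBean storageService = JMX.newMBeanProxy(
//                     mbs,
//                     new ObjectName("org.apache.cassandra:type=StorageService"),
//                     StorageServiceMBean.class);
//             // Take a snapshot of two tables without flushing memtables first
//             // ("skipFlush" is the only option documented above).
//             storageService.takeSnapshot("example-tag",
//                     Collections.singletonMap("skipFlush", "true"),
//                     "ks1.table1", "ks2.table2");
//         }
//     }
// }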
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2020_04_01.implementation; import com.microsoft.azure.management.network.v2020_04_01.FlowLog; import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl; import rx.Observable; import java.util.Map; import com.microsoft.azure.management.network.v2020_04_01.RetentionPolicyParameters; import com.microsoft.azure.management.network.v2020_04_01.FlowLogFormatParameters; import com.microsoft.azure.management.network.v2020_04_01.TrafficAnalyticsProperties; import com.microsoft.azure.management.network.v2020_04_01.ProvisioningState; class FlowLogImpl extends CreatableUpdatableImpl<FlowLog, FlowLogInner, FlowLogImpl> implements FlowLog, FlowLog.Definition, FlowLog.Update { private final NetworkManager manager; private String resourceGroupName; private String networkWatcherName; private String flowLogName; FlowLogImpl(String name, NetworkManager manager) { super(name, new FlowLogInner()); this.manager = manager; // Set resource name this.flowLogName = name; // } FlowLogImpl(FlowLogInner inner, NetworkManager manager) { super(inner.name(), inner); this.manager = manager; // Set resource name this.flowLogName = inner.name(); // set resource ancestor and positional variables this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups"); this.networkWatcherName = IdParsingUtils.getValueFromIdByName(inner.id(), "networkWatchers"); this.flowLogName = IdParsingUtils.getValueFromIdByName(inner.id(), "flowLogs"); // } @Override public NetworkManager manager() { return this.manager; } @Override public Observable<FlowLog> createResourceAsync() { FlowLogsInner client = this.manager().inner().flowLogs(); return client.createOrUpdateAsync(this.resourceGroupName, this.networkWatcherName, this.flowLogName, this.inner()) .map(innerToFluentMap(this)); } @Override public Observable<FlowLog> updateResourceAsync() { FlowLogsInner client = this.manager().inner().flowLogs(); return client.createOrUpdateAsync(this.resourceGroupName, this.networkWatcherName, this.flowLogName, this.inner()) .map(innerToFluentMap(this)); } @Override protected Observable<FlowLogInner> getInnerAsync() { FlowLogsInner client = this.manager().inner().flowLogs(); return client.getAsync(this.resourceGroupName, this.networkWatcherName, this.flowLogName); } @Override public boolean isInCreateMode() { return this.inner().id() == null; } @Override public Boolean enabled() { return this.inner().enabled(); } @Override public String etag() { return this.inner().etag(); } @Override public TrafficAnalyticsProperties flowAnalyticsConfiguration() { return this.inner().flowAnalyticsConfiguration(); } @Override public FlowLogFormatParameters format() { return this.inner().format(); } @Override public String id() { return this.inner().id(); } @Override public String location() { return this.inner().location(); } @Override public String name() { return this.inner().name(); } @Override public ProvisioningState provisioningState() { return this.inner().provisioningState(); } @Override public RetentionPolicyParameters retentionPolicy() { return this.inner().retentionPolicy(); } @Override public String storageId() { return this.inner().storageId(); } @Override public Map<String, String> tags() { return this.inner().getTags(); } @Override public String 
targetResourceGuid() { return this.inner().targetResourceGuid(); } @Override public String targetResourceId() { return this.inner().targetResourceId(); } @Override public String type() { return this.inner().type(); } @Override public FlowLogImpl withExistingNetworkWatcher(String resourceGroupName, String networkWatcherName) { this.resourceGroupName = resourceGroupName; this.networkWatcherName = networkWatcherName; return this; } @Override public FlowLogImpl withStorageId(String storageId) { this.inner().withStorageId(storageId); return this; } @Override public FlowLogImpl withTargetResourceId(String targetResourceId) { this.inner().withTargetResourceId(targetResourceId); return this; } @Override public FlowLogImpl withEnabled(Boolean enabled) { this.inner().withEnabled(enabled); return this; } @Override public FlowLogImpl withFlowAnalyticsConfiguration(TrafficAnalyticsProperties flowAnalyticsConfiguration) { this.inner().withFlowAnalyticsConfiguration(flowAnalyticsConfiguration); return this; } @Override public FlowLogImpl withFormat(FlowLogFormatParameters format) { this.inner().withFormat(format); return this; } @Override public FlowLogImpl withId(String id) { this.inner().withId(id); return this; } @Override public FlowLogImpl withLocation(String location) { this.inner().withLocation(location); return this; } @Override public FlowLogImpl withRetentionPolicy(RetentionPolicyParameters retentionPolicy) { this.inner().withRetentionPolicy(retentionPolicy); return this; } @Override public FlowLogImpl withTags(Map<String, String> tags) { this.inner().withTags(tags); return this; } }
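// Illustrative usage of the stages implemented above (comment-only sketch; the entry
// point `manager.flowLogs().define(...)` and all argument values are assumptions,
// since only the Impl class is shown here):
//
//   FlowLog flowLog = manager.flowLogs()
//       .define("myFlowLog")
//       .withExistingNetworkWatcher("myResourceGroup", "myNetworkWatcher")
//       .withTargetResourceId(networkSecurityGroupId)
//       .withStorageId(storageAccountId)
//       .withEnabled(true)
//       .create();
//
// Definition mode versus update mode is decided by isInCreateMode() above: the
// resource is created when the inner model has no id yet; otherwise the same
// createOrUpdateAsync call is issued as an update.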
import java.awt.*; import java.awt.event.*; import javax.swing.*; import java.util.Scanner; /** * TODO * * NORMAL CALCULATION METHOD * TRANSITION / ANIMATION INTERFACE * MATERIALS * RECODE IDX CLASS */ class Viewer{ public static void getUserInput(){ Scanner scanner = new Scanner(System.in); while (scanner.hasNext()) { String txt = scanner.next(); if(txt.equals("open")){ rawMesh = Parser.parseObjFile( scanner.next() ); //app.setTitle( rawMesh.name ); if(rawMesh.isEmpty()){ System.out.println("import failed"); }else{ System.out.println("import successful"); //app = new GuiFrame(512,512); app.setTitle( rawMesh.name ); app.view.matrix.setScale( 96 ); app.view.matrix.setRot( -Math.PI, 0, 0); app.view.matrix.rotateMx(); app.view.setCamera(); app.drawCanvas(); } continue; } if(txt.equals("rot")){ double a = (scanner.nextInt()*Math.PI)/180; double b = (scanner.nextInt()*Math.PI)/180; double c = (scanner.nextInt()*Math.PI)/180; app.view.matrix.setRot(a,b,c); app.view.setCamera(); app.drawCanvas(); } if(txt.equals("rot+")){ double a = (scanner.nextInt()*Math.PI)/180; double b = (scanner.nextInt()*Math.PI)/180; double c = (scanner.nextInt()*Math.PI)/180; app.view.matrix.setRot_relative(a,b,c); app.view.setCamera(); app.drawCanvas(); } if(txt.equals("scale")){ app.view.matrix.setScale( scanner.nextDouble() ); app.view.setCamera(); app.drawCanvas(); } if(txt.equals("scale+")){ app.view.matrix.setScale_relative( scanner.nextDouble() ); app.view.setCamera(); app.drawCanvas(); } if(txt.equals("getRotation")){ double[] r = app.view.matrix.copyRotation(); int x,y,z; x=(int)((r[0]*180)/Math.PI); y=(int)((r[1]*180)/Math.PI); z=(int)((r[2]*180)/Math.PI); System.out.println(" { "+x+", "+y+", "+z+"}"); } } } public static Mesh rawMesh; public static GuiFrame app; public static void main(String[] args) throws InterruptedException{ app = new GuiFrame(512,512); String filepath = "/Users/nilsmartel/git/Fiberlight/src/suz.obj"; if(args.length > 0){ filepath = args[0]; } rawMesh = Parser.parseObjFile( filepath ); if(rawMesh.isEmpty()){ System.out.println("import .obj file using 'open' command"); }else{ app.setTitle( rawMesh.name ); int r1=1; double r2=32; while(r1++ < r2){ double fac = Math.pow((r1/r2),2); app.view.matrix.setScale( 96*fac ); app.view.matrix.setRot( -Math.PI*fac, 1.00002*(1-fac), 0.7*(1-fac)); app.view.matrix.rotateMx(); app.view.setCamera(); app.drawCanvas(); Thread.sleep(40); } } getUserInput(); TextureStack.loadTextureMaps(); } } class GuiFrame{ int width, height; private final int height_offset; JFrame frame; int labelHeight = 16; JLabel label; Camera view; Canvas can; //double scale= 64; int cur_x=0; int cur_y=0; int cur_dx=0; int cur_dy=0; public GuiFrame(){ this(512,512); } public GuiFrame( int width, int height ){ this.width = width; this.height= height; this.height_offset = this.getHeightOffset(); this.can =new Canvas( width, height ); this.view = new Camera( width, height ); this.frame = new JFrame(); this.frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); this.frame.setLayout(null); this.frame.add(this.can); this.frame.setSize( this.can.getWidth(), this.can.getHeight() + this.height_offset + this.labelHeight); this.label = new JLabel("Hello World"); this.frame.getContentPane().setLayout(new BorderLayout()); this.frame.getContentPane().add("South", label); frame.addMouseMotionListener(new MouseMotionAdapter() { public void mouseMoved(MouseEvent me) { //String info = "[mouseMoved ]"; cur_x = me.getX(); cur_y = me.getY() - height_offset; //info+=" x: " + cur_x + " |y: " + cur_y;
//label.setText(info); } public void mouseDragged(MouseEvent me) { //String info = "[mouseDragged ]"; if(Viewer.rawMesh.isEmpty()) return; cur_dx = me.getX() -cur_x; cur_dy = (me.getY() - height_offset) -cur_y; cur_x = me.getX(); cur_y = me.getY() - height_offset; view.matrix.setRot_relative( -cur_dy*.01, cur_dx*.02, 0); view.setCamera(); drawCanvas(); //info+=" x: " + cur_dx + " |y: " + cur_dy; //label.setText(info); } }); this.frame.setVisible(true); } int getHeightOffset(){ String os_name = System.getProperty("os.name"); if(os_name.indexOf("Mac OS X") >= 0){ return 22; } return 0; } void showText(String txt) throws InterruptedException{ this.label.setText(txt); int i; for(i=0;i< this.labelHeight; i++ ){ this.frame.setSize( this.can.getWidth(), this.can.getHeight() + this.height_offset + i); Thread.sleep(30); } Thread.sleep(5000); for(; i>0;i--){ this.frame.setSize( this.can.getWidth(), this.can.getHeight() + this.height_offset + i); Thread.sleep(30); } this.frame.setSize( this.can.getWidth(), this.can.getHeight() + this.height_offset); } void setTitle( String title ){ this.frame.setTitle( title); } private boolean updateInter = false; void setInterval( int timer ) throws InterruptedException{ if(!this.updateInter){ this.updateInter=true; } while(this.updateInter){ this.onUpdate(); Thread.sleep(timer); } } void stopInterval(){ this.updateInter=false; } int time=0; void onUpdate(){ this.time++; } void drawCanvas(){ Pixel bgColor = new Pixel( 8, 8, 8); for(int x=0; x< this.can.getWidth(); x++){ for(int y=0; y< this.can.getHeight(); y++){ if(this.view.renderPass.isPixel[x][y]){ this.can.idx.setPixel(x,y, PixelShader.nrm( this.view.renderPass.map[x][y] ) ); }else{ this.can.idx.setPixel(x,y, bgColor ); } } } this.can.redraw(); } }
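// Example console session for the command loop in Viewer.getUserInput() above
// (comment-only sketch; the .obj path is hypothetical). Angles are entered in
// degrees and converted to radians via (deg * Math.PI) / 180:
//
//   open models/suzanne.obj   // parse an .obj file and display it
//   rot 45 0 90               // set an absolute rotation
//   rot+ 0 15 0               // rotate relative to the current orientation
//   scale 128                 // set the absolute zoom factor
//   scale+ 1.5                // adjust the zoom relative to the current value
//   getRotation               // print the current rotation, e.g. { 45, 15, 90}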
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package parReg.wbcl.newWan; import hydra.CacheHelper; import hydra.Log; import hydra.MasterController; import hydra.ProcessMgr; import hydra.RegionHelper; import hydra.RemoteTestModule; import hydra.TestConfig; import java.util.ArrayDeque; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.Executor; import parReg.wbcl.WBCLTestBB; import util.TestException; import util.TestHelper; import util.TxHelper; import com.gemstone.gemfire.cache.Declarable; import com.gemstone.gemfire.cache.EntryExistsException; import com.gemstone.gemfire.cache.EntryNotFoundException; import com.gemstone.gemfire.cache.Operation; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.asyncqueue.AsyncEvent; import com.gemstone.gemfire.cache.asyncqueue.AsyncEventListener; import com.gemstone.gemfire.cache.wan.EventSequenceID; /** MyAsyncEventListener (AsyncEventListener) * * The following steps are done: * 1) Use a separate replicated region "dupEventPRegion" to track the sequenceID per DistributedMembershipID + ThreadID. * 2) For each event's DistributedMembershipID_ThreadID, if current_SequenceID > last_SequenceId then proceed, else ignore the event as a duplicate. * 3) Provide key level synchronisation while processing duplicated keys, i.e. if event k1v1 is received by two vms vm1 and vm2, * we want only one to process it and the other to ignore it. We use a transaction to achieve this. Tx provides key level isolation and * makes processing the event and updating the sequenceID an atomic operation, so if vm1 is processing k1v1, vm2 will get a ConflictException. * 4) We should hold off processing subsequent events till the duplicate event is successfully processed, i.e. say vm1 received (and is processing) k1v1, * whereas vm2 received events k1v1 (duplicate) followed by k1v2; then vm2 should ignore k1v1 and wait till vm1 processes k1v1. To achieve this, * we loop on ConflictException in vm2 till vm1 has successfully processed k1v1 and updated the SequenceID in dupEventPRegion.
* * @author Rahul Diyewar * @since 7.0 */ public class MyAsyncEventListener implements AsyncEventListener<Object, Object>, Declarable { // updated with the current time as each event is processed by the WBCLEventListener public static int lastEventTime; /** The process ID of the VM that created this listener */ public int whereIWasRegistered; //protected Executor serialExecutor; /** noArg constructor */ public MyAsyncEventListener() { whereIWasRegistered = ProcessMgr.getProcessId(); // serialExecutor = Executors.newSingleThreadExecutor(); } //---------------------------------------------------------------------------- // GatewayEventListener API //---------------------------------------------------------------------------- /** * Process events. */ public boolean processEvents(List<AsyncEvent<Object, Object>> events) { boolean status = false; Log.getLogWriter().info("processEvents received List with " + events.size() + " GatewayEvents"); // Fail ~2% of the time (when the random draw is 99 or 100) ... ensure that we replay these events if (TestConfig.tab().getRandGen().nextInt(1, 100) < 99) { status = true; for (Iterator i = events.iterator(); i.hasNext();) { AsyncEvent event = (AsyncEvent)i.next(); logCall("processEvents", event); WBCLTestBB.getBB().getSharedCounters().setIfLarger(WBCLTestBB.lastEventTime, System.currentTimeMillis()); hydra.blackboard.SharedLock lock = null; if(event.getPossibleDuplicate()){ if(null == lock){ lock = WBCLTestBB.getBB().getSharedLock(); lock.lock(); } } //boolean eventProcessed = false; // create dupEventRegion if not available. try{ getDupEventRegion(); // check if the event is a duplicate if (hasSeenEvent(event)) { Log.getLogWriter().info("Ignoring event as it is already seen: " + event); //TxHelper.rollback(); //eventProcessed = true; } else { try { // use the event to update the local wbcl region final Region wbclRegion = CacheHelper.getCache().getRegion("wbclRegion"); final Object key = event.getKey(); final Object value = event.getDeserializedValue(); final Operation op = event.getOperation(); // serialExecutor.execute(new Runnable() { // public void run() { if (op.isCreate()) { try { Log.getLogWriter().info("Creating key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); wbclRegion.create(key, value); Log.getLogWriter().info("Done creating key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); } catch (EntryExistsException e) { Log.getLogWriter().info("Caught " + e + ", expected with concurrent operations; continuing with test"); // Revert this change once #48997 gets fixed - Start Log.getLogWriter().info("Since this event falsely re-appeared as a create event instead of an update, initiating an update event to pass the hydra test against #48997"); Log.getLogWriter().info("Putting key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); if (value == null) { wbclRegion.invalidate(key); } else { wbclRegion.put(key, value); } Log.getLogWriter().info("Done Putting key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); // Revert this change once #48997 gets fixed - End } } else if (op.isUpdate()) { Log.getLogWriter().info("Putting key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); wbclRegion.put(key, value); Log.getLogWriter().info("Done Putting key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); } else if (op.isInvalidate()) { throwException("Unexpected INVALIDATE encountered in WBCLEventListener " + op.toString() + ", " +
TestHelper.getStackTrace()); } else if (op.isDestroy()) { Log.getLogWriter().info("Destroying key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); try { wbclRegion.destroy(key); } catch (EntryNotFoundException e) { Log.getLogWriter().info("Caught " + e + ", expected with concurrent operations; continuing with test"); } Log.getLogWriter().info("Done destroying key/value pair (" + key + ", " + value + ") in region named " + wbclRegion.getName()); } // } // }); updateSeenEvent(event); //eventProcessed = true; } catch (Exception e) { status = false; throwException("WBCL Listener caught unexpected Exception " + e + ", " + TestHelper.getStackTrace(e)); } /*if (!eventProcessed) { Log.getLogWriter().info("Event is not applied, someone is still working on same key " + event); MasterController.sleepForMs(5); }*/ } } finally { if(event.getPossibleDuplicate()){ lock.unlock(); } } } } if (status) { Log.getLogWriter().info("WBCLEventListener processed batch of " + events.size() + " events, returning " + status); } else { Log.getLogWriter().info("WBCLEventListener DID NOT process batch of " + events.size() + " events, returning " + status); } return status; } public void init(java.util.Properties prop) { logCall("init(Properties)", null); } public void close() { logCall("close", null); } /** * Return whether the given event is a duplicate, i.e. has already been seen. * * @param event The event object that was passed to the listener. */ public boolean hasSeenEvent(AsyncEvent event) { String key = keyGeneration(event); long currSeqId = event.getEventSequenceID().getSequenceID(); Object lastSeqId = getDupEventRegion().get(key); if (lastSeqId == null) { return false; } else if (((Long)lastSeqId).longValue() < currSeqId) { return false; } return true; } public void updateSeenEvent(AsyncEvent event) { String asyncEventKey = keyGeneration(event); getDupEventRegion().put(asyncEventKey, event.getEventSequenceID().getSequenceID()); Log.getLogWriter().info("Key generated for this event is: " + asyncEventKey); } private String keyGeneration(AsyncEvent event) { EventSequenceID eventSeQId = event.getEventSequenceID(); return eventSeQId.getMembershipID() + "_" + eventSeQId.getThreadID(); } private Region getDupEventRegion(){ String regionName = "dupEventPRegion"; Region region = RegionHelper.getRegion(regionName); if(region == null){ region = RegionHelper.createRegion(regionName); } return region; } /** * Utility method to write an Exception string to the Event Blackboard and * to also throw an exception containing the same string. * * @param errStr String to log, post to EventBB and throw * @throws TestException containing the passed in String * * @see util.TestHelper.checkForEventError */ protected void throwException(String errStr) { hydra.blackboard.SharedMap aMap = event.EventBB.getBB().getSharedMap(); aMap.put(TestHelper.EVENT_ERROR_KEY, errStr + " " + TestHelper.getStackTrace()); Log.getLogWriter().info(errStr); throw new TestException(errStr); } /** Log that a gateway event occurred. * * @param event The event object that was passed to the listener. */ public String logCall(String methodName, AsyncEvent event) { String aStr = toString(methodName, event); Log.getLogWriter().info(aStr); return aStr; } /** Return a string description of the GatewayEvent.
* * @param event The AsyncEvent object that was passed to the listener * * @return A String description of the invoked GatewayEvent */ public String toString(String methodName, AsyncEvent event) { StringBuffer aStr = new StringBuffer(); aStr.append("Invoked " + this.getClass().getName() + ": " + methodName + " in " + RemoteTestModule.getMyClientName()); aStr.append(", whereIWasRegistered: " + whereIWasRegistered); if (event == null) { return aStr.toString(); } aStr.append(", Event:" + event); return aStr.toString(); } /** Inner class for serializing (ordering) application of updates * based on gateway events. */ class SerialExecutor implements Executor { final Queue<Runnable> tasks = new ArrayDeque<Runnable>(); final Executor executor; Runnable active; SerialExecutor(Executor executor) { this.executor = executor; } public synchronized void execute(final Runnable r) { tasks.offer(new Runnable() { public void run() { try { r.run(); } finally { scheduleNext(); } } }); if (active == null) { scheduleNext(); } } protected synchronized void scheduleNext() { if ((active = tasks.poll()) != null) { executor.execute(active); } } } /** Return when no events have been invoked for the given number of seconds. * * @param desiredSilenceSec The desired number of seconds of silence. * @param sleepMS The number of milliseconds to sleep between checks for * silence. */ public static void waitForSilence(long desiredSilenceSec, long sleepMS) { Log.getLogWriter().info("Waiting for a period of silence for " + desiredSilenceSec + " seconds..."); long desiredSilenceMS = desiredSilenceSec * 1000; long silenceStartTime = System.currentTimeMillis(); long currentTime = System.currentTimeMillis(); long lastEventTime = WBCLTestBB.getBB().getSharedCounters().read(WBCLTestBB.lastEventTime); while (currentTime - silenceStartTime < desiredSilenceMS) { try { Thread.sleep(sleepMS); } catch (InterruptedException e) { throw new TestException(TestHelper.getStackTrace(e)); } lastEventTime = WBCLTestBB.getBB().getSharedCounters().read(WBCLTestBB.lastEventTime); if (lastEventTime > silenceStartTime) { // restart the wait silenceStartTime = lastEventTime; } currentTime = System.currentTimeMillis(); } long duration = currentTime - silenceStartTime; Log.getLogWriter().info("Done waiting, clients have been silent for " + duration + " ms"); } }
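// A framework-free sketch of the dedup rule that hasSeenEvent()/updateSeenEvent()
// implement above. Assumptions: the replicated "dupEventPRegion" and the surrounding
// transaction are replaced here by a local ConcurrentHashMap purely for illustration;
// only the per-(membershipId, threadId) sequence comparison is the same.
//
// import java.util.concurrent.ConcurrentHashMap;
// import java.util.concurrent.ConcurrentMap;
// import java.util.concurrent.atomic.AtomicBoolean;
//
// class SequenceDedupSketch {
//     private final ConcurrentMap<String, Long> lastSeen = new ConcurrentHashMap<>();
//
//     /** Returns true (and records the event) only if its sequence id advances
//      *  the highest id seen so far for the originating member/thread. */
//     boolean accept(String membershipId, long threadId, long sequenceId) {
//         String key = membershipId + "_" + threadId; // same key scheme as keyGeneration()
//         AtomicBoolean admitted = new AtomicBoolean(false);
//         lastSeen.compute(key, (k, prev) -> {
//             if (prev == null || prev < sequenceId) {
//                 admitted.set(true);
//                 return sequenceId;   // advance the high-water mark
//             }
//             return prev;             // duplicate: keep the old mark
//         });
//         return admitted.get();
//     }
// }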
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.document; import java.util.Arrays; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.Set; import java.util.HashSet; import java.util.Map; import java.util.HashMap; import java.io.IOException; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.store.*; import org.apache.lucene.document.*; import org.apache.lucene.analysis.*; import org.apache.lucene.index.*; import org.apache.lucene.search.*; import org.junit.After; import org.junit.Before; public class TestLazyDocument extends LuceneTestCase { public final int NUM_DOCS = atLeast(10); public final String[] FIELDS = new String[] { "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k" }; public final int NUM_VALUES = atLeast(100); public Directory dir = newDirectory(); @After public void removeIndex() { if (null != dir) { try { dir.close(); dir = null; } catch (Exception e) { /* NOOP */ } } } @Before public void createIndex() throws Exception { Analyzer analyzer = new MockAnalyzer(random()); IndexWriter writer = new IndexWriter (dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); try { for (int docid = 0; docid < NUM_DOCS; docid++) { Document d = new Document(); d.add(newStringField("docid", ""+docid, Field.Store.YES)); d.add(newStringField("never_load", "fail", Field.Store.YES)); for (String f : FIELDS) { for (int val = 0; val < NUM_VALUES; val++) { d.add(newStringField(f, docid+"_"+f+"_"+val, Field.Store.YES)); } } d.add(newStringField("load_later", "yes", Field.Store.YES)); writer.addDocument(d); } } finally { writer.close(); } } public void testLazy() throws Exception { final int id = random().nextInt(NUM_DOCS); IndexReader reader = DirectoryReader.open(dir); try { Query q = new TermQuery(new Term("docid", ""+id)); IndexSearcher searcher = new IndexSearcher(reader); ScoreDoc[] hits = searcher.search(q, 100).scoreDocs; assertEquals("Too many docs", 1, hits.length); LazyTestingStoredFieldVisitor visitor = new LazyTestingStoredFieldVisitor(new LazyDocument(reader, hits[0].doc), FIELDS); reader.document(hits[0].doc, visitor); Document d = visitor.doc; int numFieldValues = 0; Map<String,Integer> fieldValueCounts = new HashMap<String,Integer>(); // at this point, all FIELDS should be Lazy and unrealized for (IndexableField f : d) { numFieldValues++; if (f.name().equals("never_load")) { fail("never_load was loaded"); } if (f.name().equals("load_later")) { fail("load_later was loaded on first pass"); } if (f.name().equals("docid")) { assertFalse(f.name(), f instanceof LazyDocument.LazyField); } else { int count = fieldValueCounts.containsKey(f.name()) ? 
fieldValueCounts.get(f.name()) : 0; count++; fieldValueCounts.put(f.name(), count); assertTrue(f.name() + " is " + f.getClass(), f instanceof LazyDocument.LazyField); LazyDocument.LazyField lf = (LazyDocument.LazyField) f; assertFalse(f.name() + " is loaded", lf.hasBeenLoaded()); } } System.out.println("numFieldValues == " + numFieldValues); assertEquals("numFieldValues", 1 + (NUM_VALUES * FIELDS.length), numFieldValues); for (String fieldName : fieldValueCounts.keySet()) { assertEquals("fieldName count: " + fieldName, NUM_VALUES, (int)fieldValueCounts.get(fieldName)); } // pick a single field name to load a single value final String fieldName = FIELDS[random().nextInt(FIELDS.length)]; final IndexableField[] fieldValues = d.getFields(fieldName); assertEquals("#vals in field: " + fieldName, NUM_VALUES, fieldValues.length); final int valNum = random().nextInt(fieldValues.length); assertEquals(id + "_" + fieldName + "_" + valNum, fieldValues[valNum].stringValue()); // now every value of fieldName should be loaded for (IndexableField f : d) { if (f.name().equals("never_load")) { fail("never_load was loaded"); } if (f.name().equals("load_later")) { fail("load_later was loaded too soon"); } if (f.name().equals("docid")) { assertFalse(f.name(), f instanceof LazyDocument.LazyField); } else { assertTrue(f.name() + " is " + f.getClass(), f instanceof LazyDocument.LazyField); LazyDocument.LazyField lf = (LazyDocument.LazyField) f; assertEquals(f.name() + " is loaded?", lf.name().equals(fieldName), lf.hasBeenLoaded()); } } // use the same LazyDoc to ask for one more lazy field visitor = new LazyTestingStoredFieldVisitor(new LazyDocument(reader, hits[0].doc), "load_later"); reader.document(hits[0].doc, visitor); d = visitor.doc; // ensure we have all the values we expect now, and that // adding one more lazy field didn't "unload" the existing LazyFields // we already loaded. for (IndexableField f : d) { if (f.name().equals("never_load")) { fail("never_load was loaded"); } if (f.name().equals("docid")) { assertFalse(f.name(), f instanceof LazyDocument.LazyField); } else { assertTrue(f.name() + " is " + f.getClass(), f instanceof LazyDocument.LazyField); LazyDocument.LazyField lf = (LazyDocument.LazyField) f; assertEquals(f.name() + " is loaded?", lf.name().equals(fieldName), lf.hasBeenLoaded()); } } // even the underlying doc shouldn't have never_load assertNull("never_load was loaded in wrapped doc", visitor.lazyDoc.getDocument().getField("never_load")); } finally { reader.close(); } } private static class LazyTestingStoredFieldVisitor extends StoredFieldVisitor { public final Document doc = new Document(); public final LazyDocument lazyDoc; public final Set<String> lazyFieldNames; LazyTestingStoredFieldVisitor(LazyDocument l, String... fields) { lazyDoc = l; lazyFieldNames = new HashSet<String>(Arrays.asList(fields)); } @Override public Status needsField(FieldInfo fieldInfo) { if (fieldInfo.name.equals("docid")) { return Status.YES; } else if (fieldInfo.name.equals("never_load")) { return Status.NO; } else { if (lazyFieldNames.contains(fieldInfo.name)) { doc.add(lazyDoc.getField(fieldInfo)); } } return Status.NO; } @Override public void stringField(FieldInfo fieldInfo, String value) throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); ft.setStoreTermVectors(fieldInfo.hasVectors()); ft.setIndexed(fieldInfo.isIndexed()); ft.setOmitNorms(fieldInfo.omitsNorms()); ft.setIndexOptions(fieldInfo.getIndexOptions()); doc.add(new Field(fieldInfo.name, value, ft)); } } }
/* * Copyright 2014 Kaazing Corporation, All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaazing.nuklei.protocol.tcp; import org.kaazing.nuklei.concurrent.ringbuffer.mpsc.MpscRingBufferWriter; import uk.co.real_logic.agrona.BitUtil; import uk.co.real_logic.agrona.MutableDirectBuffer; import uk.co.real_logic.agrona.concurrent.UnsafeBuffer; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.SocketChannel; /** */ public class TcpConnection { public static final int MAX_RECEIVE_LENGTH = 4096; private final SocketChannel channel; private final MpscRingBufferWriter receiveWriter; private final long id; private final ByteBuffer receiveByteBuffer; private final MutableDirectBuffer atomicBuffer; private final MutableDirectBuffer informBuffer = new UnsafeBuffer(ByteBuffer.allocateDirect(BitUtil.SIZE_OF_LONG)); // TODO: these will false share most likely private volatile boolean senderClosed = false; private volatile boolean receiverClosed = false; private boolean closed = false; // connect version public TcpConnection( final long id, final InetSocketAddress localAddress, final MpscRingBufferWriter receiveWriter) { try { channel = SocketChannel.open(); this.id = id; this.receiveWriter = receiveWriter; channel.bind(localAddress); channel.configureBlocking(false); receiveByteBuffer = ByteBuffer.allocateDirect(MAX_RECEIVE_LENGTH).order(ByteOrder.nativeOrder()); atomicBuffer = new UnsafeBuffer(receiveByteBuffer); // connect() and management is done by caller } catch (final IOException ex) { throw new RuntimeException(ex); } } // accepted version public TcpConnection( final SocketChannel channel, final long id, final MpscRingBufferWriter receiveWriter) { this.channel = channel; this.id = id; this.receiveWriter = receiveWriter; receiveByteBuffer = ByteBuffer.allocateDirect(MAX_RECEIVE_LENGTH).order(ByteOrder.nativeOrder()); atomicBuffer = new UnsafeBuffer(receiveByteBuffer); } public SocketChannel channel() { return channel; } public long id() { return id; } public void close() { closed = true; try { channel.close(); } catch (final Exception ex) { throw new RuntimeException(ex); } } public void send(final ByteBuffer buffer) { try { final int length = buffer.remaining(); final int sent = channel.write(buffer); if (sent < length) { // TODO: finish by handling appropriately with throw new IllegalStateException("could not send all of buffer: " + sent + "/" + length); // temporary } } catch (final Exception ex) { ex.printStackTrace(); // TODO: temp } } public int onReadable() { try { receiveByteBuffer.clear(); receiveByteBuffer.putLong(id); final int length = channel.read(receiveByteBuffer); if (-1 == length) { if (!receiveWriter.write(TcpManagerTypeId.EOF, atomicBuffer, 0, BitUtil.SIZE_OF_LONG)) { throw new IllegalStateException("could not write to receive buffer"); } return -1; // signal selector to cancel OP_READ and short circuit the rest here } if 
(!receiveWriter.write(TcpManagerTypeId.RECEIVED_DATA, atomicBuffer, 0, length + BitUtil.SIZE_OF_LONG)) { throw new IllegalStateException("could not write to receive buffer"); } } catch (final Exception ex) { ex.printStackTrace(); // TODO: temp } return 0; } public int onWritable() { return 0; } public void senderClosed() { senderClosed = true; } public boolean hasSenderClosed() { return senderClosed; } public void receiverClosed() { receiverClosed = true; } public boolean hasReceiverClosed() { return receiverClosed; } public boolean isClosed() { return closed; } public MpscRingBufferWriter receiveWriter() { return receiveWriter; } public void informOfNewConnection() { informBuffer.putLong(0, id); if (!receiveWriter.write(TcpManagerTypeId.NEW_CONNECTION, informBuffer, 0, BitUtil.SIZE_OF_LONG)) { throw new IllegalStateException("could not write to receive buffer"); } } }
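/*
 * A consumer-side sketch (not part of the class above) of the frame layout
 * onReadable() and informOfNewConnection() write into the receive ring buffer:
 * every message starts with the 8-byte connection id, and for RECEIVED_DATA
 * the payload follows it. The handler signature is a hypothetical stand-in for
 * whatever callback the ring buffer reader in use invokes, and TcpManagerTypeId
 * is assumed to be reachable as it is from TcpConnection's package.
 */
import uk.co.real_logic.agrona.BitUtil;
import uk.co.real_logic.agrona.DirectBuffer;

class ReceiveFrameSketch {
    static void onMessage(final int typeId, final DirectBuffer buffer, final int offset, final int length) {
        final long connectionId = buffer.getLong(offset); // id always leads the frame
        if (TcpManagerTypeId.RECEIVED_DATA == typeId) {
            final int payloadOffset = offset + BitUtil.SIZE_OF_LONG;
            final int payloadLength = length - BitUtil.SIZE_OF_LONG;
            // hand (connectionId, payloadOffset, payloadLength) to the application
        } else if (TcpManagerTypeId.EOF == typeId) {
            // remote side hit end-of-stream; length is exactly SIZE_OF_LONG
        }
    }
}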
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.bulk; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndexMissingException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; /** * */ public class TransportBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> { private final AutoCreateIndex autoCreateIndex; private final boolean allowIdGeneration; private final ClusterService clusterService; private final TransportShardBulkAction shardBulkAction; private final TransportCreateIndexAction createIndexAction; @Inject public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardBulkAction shardBulkAction, TransportCreateIndexAction createIndexAction, ActionFilters actionFilters) { super(settings, 
BulkAction.NAME, threadPool, transportService, actionFilters); this.clusterService = clusterService; this.shardBulkAction = shardBulkAction; this.createIndexAction = createIndexAction; this.autoCreateIndex = new AutoCreateIndex(settings); this.allowIdGeneration = componentSettings.getAsBoolean("action.allow_id_generation", true); } @Override public BulkRequest newRequestInstance() { return new BulkRequest(); } @Override protected void doExecute(final BulkRequest bulkRequest, final ActionListener<BulkResponse> listener) { final long startTime = System.currentTimeMillis(); final AtomicArray<BulkItemResponse> responses = new AtomicArray<>(bulkRequest.requests.size()); if (autoCreateIndex.needToCheck()) { // Keep track of all unique indices and all unique types per index for the create index requests: final Map<String, Set<String>> indicesAndTypes = new HashMap<>(); for (ActionRequest request : bulkRequest.requests) { if (request instanceof DocumentRequest) { DocumentRequest req = (DocumentRequest) request; Set<String> types = indicesAndTypes.get(req.index()); if (types == null) { indicesAndTypes.put(req.index(), types = new HashSet<>()); } types.add(req.type()); } else { throw new ElasticsearchException("Parsed unknown request in bulk actions: " + request.getClass().getSimpleName()); } } final AtomicInteger counter = new AtomicInteger(indicesAndTypes.size()); ClusterState state = clusterService.state(); for (Map.Entry<String, Set<String>> entry : indicesAndTypes.entrySet()) { final String index = entry.getKey(); if (autoCreateIndex.shouldAutoCreate(index, state)) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(bulkRequest); createIndexRequest.index(index); for (String type : entry.getValue()) { createIndexRequest.mapping(type); } createIndexRequest.cause("auto(bulk api)"); createIndexRequest.masterNodeTimeout(bulkRequest.timeout()); createIndexAction.execute(createIndexRequest, new ActionListener<CreateIndexResponse>() { @Override public void onResponse(CreateIndexResponse result) { if (counter.decrementAndGet() == 0) { try { executeBulk(bulkRequest, startTime, listener, responses); } catch (Throwable t) { listener.onFailure(t); } } } @Override public void onFailure(Throwable e) { if (!(ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException)) { // fail all requests involving this index, if create didn't work for (int i = 0; i < bulkRequest.requests.size(); i++) { ActionRequest request = bulkRequest.requests.get(i); if (request != null && setResponseFailureIfIndexMatches(responses, i, request, index, e)) { bulkRequest.requests.set(i, null); } } } if (counter.decrementAndGet() == 0) { try { executeBulk(bulkRequest, startTime, listener, responses); } catch (Throwable t) { listener.onFailure(t); } } } }); } else { if (counter.decrementAndGet() == 0) { executeBulk(bulkRequest, startTime, listener, responses); } } } } else { executeBulk(bulkRequest, startTime, listener, responses); } } private boolean setResponseFailureIfIndexMatches(AtomicArray<BulkItemResponse> responses, int idx, ActionRequest request, String index, Throwable e) { if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; if (index.equals(indexRequest.index())) { responses.set(idx, new BulkItemResponse(idx, "index", new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), e))); return true; } } else if (request instanceof DeleteRequest) { DeleteRequest deleteRequest = (DeleteRequest) request; if (index.equals(deleteRequest.index()))
{ responses.set(idx, new BulkItemResponse(idx, "index", new BulkItemResponse.Failure(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), e))); return true; } } else if (request instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) request; if (index.equals(updateRequest.index())) { responses.set(idx, new BulkItemResponse(idx, "index", new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), e))); return true; } } else { throw new ElasticsearchException("Parsed unknown request in bulk actions: " + request.getClass().getSimpleName()); } return false; } /** * This method executes the {@link BulkRequest} and calls the given listener once the request returns. * This method will not create any indices even if auto-create indices is enabled. * * @see #doExecute(BulkRequest, org.elasticsearch.action.ActionListener) */ public void executeBulk(final BulkRequest bulkRequest, final ActionListener<BulkResponse> listener) { final long startTime = System.currentTimeMillis(); executeBulk(bulkRequest, startTime, listener, new AtomicArray<BulkItemResponse>(bulkRequest.requests.size())); } private final long buildTookInMillis(long startTime) { // protect ourselves against time going backwards return Math.max(1, System.currentTimeMillis() - startTime); } private void executeBulk(final BulkRequest bulkRequest, final long startTime, final ActionListener<BulkResponse> listener, final AtomicArray<BulkItemResponse> responses ) { final ClusterState clusterState = clusterService.state(); // TODO use timeout to wait here if its blocked... clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE); final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState.metaData()); MetaData metaData = clusterState.metaData(); for (int i = 0; i < bulkRequest.requests.size(); i++) { ActionRequest request = bulkRequest.requests.get(i); if (request instanceof DocumentRequest) { DocumentRequest req = (DocumentRequest) request; if (addFailureIfIndexIsUnavailable(req, bulkRequest, responses, i, concreteIndices, metaData)) { continue; } String concreteIndex = concreteIndices.resolveIfAbsent(req.index(), req.indicesOptions()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; MappingMetaData mappingMd = null; if (metaData.hasIndex(concreteIndex)) { mappingMd = metaData.index(concreteIndex).mappingOrDefault(indexRequest.type()); } try { indexRequest.process(metaData, mappingMd, allowIdGeneration, concreteIndex); } catch (ElasticsearchParseException | RoutingMissingException e) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(concreteIndex, indexRequest.type(), indexRequest.id(), e); BulkItemResponse bulkItemResponse = new BulkItemResponse(i, "index", failure); responses.set(i, bulkItemResponse); // make sure the request gets never processed again bulkRequest.requests.set(i, null); } } else { concreteIndices.resolveIfAbsent(req.index(), req.indicesOptions()); req.routing(clusterState.metaData().resolveIndexRouting(req.routing(), req.index())); } } } // first, go over all the requests and create a ShardId -> Operations mapping Map<ShardId, List<BulkItemRequest>> requestsByShard = Maps.newHashMap(); for (int i = 0; i < bulkRequest.requests.size(); i++) { ActionRequest request = bulkRequest.requests.get(i); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; String concreteIndex = concreteIndices.getConcreteIndex(indexRequest.index()); ShardId shardId 
= clusterService.operationRouting().indexShards(clusterState, concreteIndex, indexRequest.type(), indexRequest.id(), indexRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { list = Lists.newArrayList(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); } else if (request instanceof DeleteRequest) { DeleteRequest deleteRequest = (DeleteRequest) request; String concreteIndex = concreteIndices.getConcreteIndex(deleteRequest.index()); MappingMetaData mappingMd = clusterState.metaData().index(concreteIndex).mappingOrDefault(deleteRequest.type()); if (mappingMd != null && mappingMd.routing().required() && deleteRequest.routing() == null) { // if routing is required, and no routing on the delete request, we need to broadcast it.... GroupShardsIterator groupShards = clusterService.operationRouting().broadcastDeleteShards(clusterState, concreteIndex); for (ShardIterator shardIt : groupShards) { List<BulkItemRequest> list = requestsByShard.get(shardIt.shardId()); if (list == null) { list = Lists.newArrayList(); requestsByShard.put(shardIt.shardId(), list); } list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest))); } } else { ShardId shardId = clusterService.operationRouting().deleteShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { list = Lists.newArrayList(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); } } else if (request instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) request; String concreteIndex = concreteIndices.getConcreteIndex(updateRequest.index()); MappingMetaData mappingMd = clusterState.metaData().index(concreteIndex).mappingOrDefault(updateRequest.type()); if (mappingMd != null && mappingMd.routing().required() && updateRequest.routing() == null) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), "routing is required for this item", RestStatus.BAD_REQUEST); responses.set(i, new BulkItemResponse(i, updateRequest.type(), failure)); continue; } ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, updateRequest.type(), updateRequest.id(), updateRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { list = Lists.newArrayList(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); } } if (requestsByShard.isEmpty()) { listener.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTime))); return; } final AtomicInteger counter = new AtomicInteger(requestsByShard.size()); for (Map.Entry<ShardId, List<BulkItemRequest>> entry : requestsByShard.entrySet()) { final ShardId shardId = entry.getKey(); final List<BulkItemRequest> requests = entry.getValue(); BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId.index().name(), shardId.id(), bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()])); bulkShardRequest.replicationType(bulkRequest.replicationType()); bulkShardRequest.consistencyLevel(bulkRequest.consistencyLevel()); bulkShardRequest.timeout(bulkRequest.timeout()); shardBulkAction.execute(bulkShardRequest, new ActionListener<BulkShardResponse>() { @Override public void 
onResponse(BulkShardResponse bulkShardResponse) { for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } if (counter.decrementAndGet() == 0) { finishHim(); } } @Override public void onFailure(Throwable e) { // create failures for all relevant requests String message = ExceptionsHelper.detailedMessage(e); RestStatus status = ExceptionsHelper.status(e); for (BulkItemRequest request : requests) { if (request.request() instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), indexRequest.opType().toString().toLowerCase(Locale.ENGLISH), new BulkItemResponse.Failure(concreteIndices.getConcreteIndex(indexRequest.index()), indexRequest.type(), indexRequest.id(), message, status))); } else if (request.request() instanceof DeleteRequest) { DeleteRequest deleteRequest = (DeleteRequest) request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), "delete", new BulkItemResponse.Failure(concreteIndices.getConcreteIndex(deleteRequest.index()), deleteRequest.type(), deleteRequest.id(), message, status))); } else if (request.request() instanceof UpdateRequest) { UpdateRequest updateRequest = (UpdateRequest) request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), "update", new BulkItemResponse.Failure(concreteIndices.getConcreteIndex(updateRequest.index()), updateRequest.type(), updateRequest.id(), message, status))); } } if (counter.decrementAndGet() == 0) { finishHim(); } } private void finishHim() { listener.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTime))); } }); } } private boolean addFailureIfIndexIsUnavailable(DocumentRequest request, BulkRequest bulkRequest, AtomicArray<BulkItemResponse> responses, int idx, final ConcreteIndices concreteIndices, final MetaData metaData) { String concreteIndex = concreteIndices.getConcreteIndex(request.index()); Exception unavailableException = null; if (concreteIndex == null) { try { concreteIndex = concreteIndices.resolveIfAbsent(request.index(), request.indicesOptions()); } catch (IndexClosedException ice) { unavailableException = ice; } catch (IndexMissingException ime) { // Fix for issue where bulk request references an index that // cannot be auto-created see issue #8125 unavailableException = ime; } } if (unavailableException == null) { IndexMetaData indexMetaData = metaData.index(concreteIndex); if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { unavailableException = new IndexClosedException(new Index(metaData.index(request.index()).getIndex())); } } if (unavailableException != null) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(), unavailableException); BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, "index", failure); responses.set(idx, bulkItemResponse); // make sure the request gets never processed again bulkRequest.requests.set(idx, null); return true; } return false; } private static class ConcreteIndices { private final Map<String, String> indices = new HashMap<>(); private final MetaData metaData; ConcreteIndices(MetaData metaData) { this.metaData = metaData; } String getConcreteIndex(String indexOrAlias) { return indices.get(indexOrAlias); } String resolveIfAbsent(String indexOrAlias, IndicesOptions indicesOptions) { String concreteIndex = indices.get(indexOrAlias); if 
(concreteIndex == null) { concreteIndex = metaData.concreteSingleIndex(indexOrAlias, indicesOptions); indices.put(indexOrAlias, concreteIndex); } return concreteIndex; } } }
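/*
 * A minimal client-side sketch of driving the action above: build a
 * BulkRequest from individual index/delete operations and execute it through
 * the Client API. The index name, type, ids, and document source are
 * illustrative assumptions.
 */
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;

class BulkUsageSketch {
    static void runBulk(Client client) {
        BulkRequest bulk = new BulkRequest();
        bulk.add(new IndexRequest("logs", "event", "1").source("{\"msg\":\"hello\"}"));
        bulk.add(new DeleteRequest("logs", "event", "2"));
        BulkResponse response = client.bulk(bulk).actionGet();
        // Item responses keep their original positions, so partial failures
        // can be matched back to the requests that caused them.
        if (response.hasFailures()) {
            System.err.println(response.buildFailureMessage());
        }
    }
}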
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.refactoring.classes.membersManager; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.FluentIterable; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiReference; import com.intellij.util.containers.MultiMap; import com.jetbrains.NotNullPredicate; import com.jetbrains.python.PyNames; import com.jetbrains.python.codeInsight.imports.AddImportHelper; import com.jetbrains.python.codeInsight.imports.AddImportHelper.ImportPriority; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.PyFunctionBuilder; import com.jetbrains.python.psi.types.TypeEvalContext; import com.jetbrains.python.refactoring.classes.PyClassRefactoringUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * Plugin that moves class methods * * @author Ilya.Kazakevich */ class MethodsManager extends MembersManager<PyFunction> { /** * Some decorators should be copied along with a method when the method is made abstract. Here is the list. */ private static final String[] DECORATORS_MAY_BE_COPIED_TO_ABSTRACT = {PyNames.PROPERTY, PyNames.CLASSMETHOD, PyNames.STATICMETHOD}; public static final String ABC_META_PACKAGE = "abc"; private static final NoPropertiesPredicate NO_PROPERTIES = new NoPropertiesPredicate(); MethodsManager() { super(PyFunction.class); } @Override public boolean hasConflict(@NotNull final PyFunction member, @NotNull final PyClass aClass) { return NamePredicate.hasElementWithSameName(member, Arrays.asList(aClass.getMethods())); } @NotNull @Override protected Collection<PyElement> getDependencies(@NotNull final MultiMap<PyClass, PyElement> usedElements) { return Collections.emptyList(); } @NotNull @Override protected MultiMap<PyClass, PyElement> getDependencies(@NotNull final PyElement member) { final MyPyRecursiveElementVisitor visitor = new MyPyRecursiveElementVisitor(); member.accept(visitor); return visitor.myResult; } @NotNull @Override protected List<? extends PyElement> getMembersCouldBeMoved(@NotNull final PyClass pyClass) { return FluentIterable.from(Arrays.asList(pyClass.getMethods())).filter(new NamelessFilter<>()).filter(NO_PROPERTIES).toList(); } @Override protected Collection<PyElement> moveMembers(@NotNull final PyClass from, @NotNull final Collection<PyMemberInfo<PyFunction>> members, final PyClass @NotNull ...
to) { final Collection<PyFunction> methodsToMove = fetchElements(Collections2.filter(members, new AbstractFilter(false))); final Collection<PyFunction> methodsToAbstract = fetchElements(Collections2.filter(members, new AbstractFilter(true))); makeMethodsAbstract(methodsToAbstract, to); return moveMethods(from, methodsToMove, true, to); } /** * Creates an abstract version of each method in each class (does not touch the method itself, as opposed to {@link #moveMethods(PyClass, Collection, boolean, PyClass...)}) * * @param currentFunctions functions to make abstract * @param to classes where the abstract methods should be created */ private static void makeMethodsAbstract(final Collection<PyFunction> currentFunctions, final PyClass... to) { final Set<PsiFile> filesToCheckImport = new HashSet<>(); final Set<PyClass> classesToAddMetaAbc = new HashSet<>(); for (final PyFunction function : currentFunctions) { for (final PyClass destClass : to) { final PyFunctionBuilder functionBuilder = PyFunctionBuilder.copySignature(function, DECORATORS_MAY_BE_COPIED_TO_ABSTRACT); functionBuilder.decorate(PyNames.ABSTRACTMETHOD); PyClassRefactoringUtil.addMethods(destClass, false, functionBuilder.buildFunction()); classesToAddMetaAbc.add(destClass); } } // Add ABCMeta to new classes if needed for (final PyClass aClass : classesToAddMetaAbc) { final Project project = aClass.getProject(); final PsiFile file = aClass.getContainingFile(); final PyClass abcMetaClass = PyPsiFacade.getInstance(project).createClassByQName(PyNames.ABC_META, aClass); final TypeEvalContext context = TypeEvalContext.userInitiated(project, file); if (abcMetaClass != null && PyClassRefactoringUtil.addMetaClassIfNotExist(aClass, abcMetaClass, context)) { filesToCheckImport.add(file); } } // Add imports for ABC if needed for (final PsiFile file : filesToCheckImport) { AddImportHelper.addOrUpdateFromImportStatement(file, ABC_META_PACKAGE, PyNames.ABSTRACTMETHOD, null, ImportPriority.BUILTIN, null); PyClassRefactoringUtil.optimizeImports(file); // To remove redundant imports } } /** * Moves methods (as opposed to {@link #makeMethodsAbstract(Collection, PyClass...)}) * * @param from source * @param methodsToMove what to move * @param to where * @param skipIfExist skip (do not add) methods that already exist * @return newly added methods */ static List<PyElement> moveMethods(final PyClass from, final Collection<? extends PyFunction> methodsToMove, final boolean skipIfExist, final PyClass... to) { final List<PyElement> result = new ArrayList<>(); for (final PyClass destClass : to) { // We move copies here because there may be several destinations final List<PyFunction> copies = new ArrayList<>(methodsToMove.size()); for (final PyFunction element : methodsToMove) { final PyFunction newMethod = (PyFunction)element.copy(); copies.add(newMethod); } result.addAll(PyClassRefactoringUtil.copyMethods(copies, destClass, skipIfExist)); } deleteElements(methodsToMove); return result; } @NotNull @Override public PyMemberInfo<PyFunction> apply(@NotNull final PyFunction pyFunction) { final PyUtil.MethodFlags flags = PyUtil.MethodFlags.of(pyFunction); assert flags != null : "No flags returned while element is a function " + pyFunction; final boolean isStatic = flags.isStaticMethod() || flags.isClassMethod(); return new PyMemberInfo<>(pyFunction, isStatic, buildDisplayMethodName(pyFunction), isOverrides(pyFunction), this, couldBeAbstract(pyFunction)); } /** * @return if method could be made abstract?
(that means "create abstract version if method in parent class") */ private static boolean couldBeAbstract(@NotNull final PyFunction function) { if (PyUtil.isInitMethod(function)) { return false; // Who wants to make __init__ abstract?! } final PyUtil.MethodFlags flags = PyUtil.MethodFlags.of(function); assert flags != null : "Function should be called on method!"; final boolean py3K = LanguageLevel.forElement(function).isPy3K(); return flags.isInstanceMethod() || py3K; //Any method could be made abstract in py3 } @Nullable private static Boolean isOverrides(final PyFunction pyFunction) { final PyClass clazz = PyUtil.getContainingClassOrSelf(pyFunction); assert clazz != null : "Refactoring called on function, not method: " + pyFunction; for (final PyClass parentClass : clazz.getSuperClasses(null)) { final PyFunction parentMethod = parentClass.findMethodByName(pyFunction.getName(), true, null); if (parentMethod != null) { return true; } } return null; } @NotNull private static String buildDisplayMethodName(@NotNull final PyFunction pyFunction) { final StringBuilder builder = new StringBuilder(pyFunction.getName()); builder.append('('); final PyParameter[] arguments = pyFunction.getParameterList().getParameters(); for (final PyParameter parameter : arguments) { builder.append(parameter.getName()); if (arguments.length > 1 && parameter != arguments[arguments.length - 1]) { builder.append(", "); } } builder.append(')'); return builder.toString(); } /** * Filters member infos to find if they should be abstracted */ private static class AbstractFilter extends NotNullPredicate<PyMemberInfo<PyFunction>> { private final boolean myAllowAbstractOnly; /** * @param allowAbstractOnly returns only methods to be abstracted. Returns only methods to be moved otherwise. */ private AbstractFilter(final boolean allowAbstractOnly) { myAllowAbstractOnly = allowAbstractOnly; } @Override protected boolean applyNotNull(@NotNull final PyMemberInfo<PyFunction> input) { return input.isToAbstract() == myAllowAbstractOnly; } } private static class MyPyRecursiveElementVisitor extends PyRecursiveElementVisitorWithResult { @Override public void visitPyCallExpression(final PyCallExpression node) { // TODO: refactor, messy code final PyExpression callee = node.getCallee(); if (callee != null) { final PsiReference calleeRef = callee.getReference(); if (calleeRef != null) { final PsiElement calleeDeclaration = calleeRef.resolve(); if (calleeDeclaration instanceof PyFunction) { final PyFunction calleeFunction = (PyFunction)calleeDeclaration; final PyClass clazz = calleeFunction.getContainingClass(); if (clazz != null) { if (PyUtil.isInitMethod(calleeFunction)) { return; // Init call should not be marked as dependency } myResult.putValue(clazz, calleeFunction); } } } } } } /** * Filter out property setters and getters */ private static class NoPropertiesPredicate implements Predicate<PyFunction> { @Override public boolean apply(@NotNull PyFunction input) { return input.getProperty() == null; } } }
/* * Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved. */ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.org.apache.bcel.internal.util; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.util.HashSet; import java.util.Set; import com.sun.org.apache.bcel.internal.Const; import com.sun.org.apache.bcel.internal.classfile.Attribute; import com.sun.org.apache.bcel.internal.classfile.ClassParser; import com.sun.org.apache.bcel.internal.classfile.ConstantPool; import com.sun.org.apache.bcel.internal.classfile.JavaClass; import com.sun.org.apache.bcel.internal.classfile.Method; import com.sun.org.apache.bcel.internal.classfile.Utility; /** * Read class file(s) and convert them into HTML files. * * Given a JavaClass object "class" that is in package "package", five files * will be created in the specified directory. * * <OL> * <LI> "package"."class".html as the main file which defines the frames for * the following subfiles. * <LI> "package"."class"_attributes.html contains all (known) attributes found in the file * <LI> "package"."class"_cp.html contains the constant pool * <LI> "package"."class"_code.html contains the byte code * <LI> "package"."class"_methods.html contains references to all methods and fields of the class * </OL> * * All subfiles reference each other appropriately, e.g. clicking on a * method in the Method's frame will jump to the appropriate method in * the Code frame. * * @LastModified: Jan 2020 */ public class Class2HTML { private final JavaClass java_class; // current class object private final String dir; private static String class_package; // name of package, unclean to make it static, but ... private static String class_name; // name of current class, ditto private static ConstantPool constant_pool; private static final Set<String> basic_types = new HashSet<>(); static { basic_types.add("int"); basic_types.add("short"); basic_types.add("boolean"); basic_types.add("void"); basic_types.add("char"); basic_types.add("byte"); basic_types.add("long"); basic_types.add("double"); basic_types.add("float"); } /** * Write contents of the given JavaClass into HTML files. * * @param java_class The class to write * @param dir The directory to put the files in */ public Class2HTML(final JavaClass java_class, final String dir) throws IOException { final Method[] methods = java_class.getMethods(); this.java_class = java_class; this.dir = dir; class_name = java_class.getClassName(); // Remember full name constant_pool = java_class.getConstantPool(); // Get package name by chopping off everything after the last '.'
final int index = class_name.lastIndexOf('.'); if (index > -1) { class_package = class_name.substring(0, index); } else { class_package = ""; // default package } final ConstantHTML constant_html = new ConstantHTML(dir, class_name, class_package, methods, constant_pool); /* Attributes can't be written in one step, so we just open a file * which will be written consequently. */ final AttributeHTML attribute_html = new AttributeHTML(dir, class_name, constant_pool, constant_html); new MethodHTML(dir, class_name, methods, java_class.getFields(), constant_html, attribute_html); // Write main file (with frames, yuk) writeMainHTML(attribute_html); new CodeHTML(dir, class_name, methods, constant_pool, constant_html); attribute_html.close(); } public static void main( final String[] argv ) throws IOException { final String[] file_name = new String[argv.length]; int files = 0; ClassParser parser = null; JavaClass java_class = null; String zip_file = null; final char sep = File.separatorChar; String dir = "." + sep; // Where to store HTML files /* Parse command line arguments. */ for (int i = 0; i < argv.length; i++) { if (argv[i].charAt(0) == '-') { // command line switch if (argv[i].equals("-d")) { // Specify target directory, default '.' dir = argv[++i]; if (!dir.endsWith("" + sep)) { dir = dir + sep; } final File store = new File(dir); if (!store.isDirectory()) { final boolean created = store.mkdirs(); // Create target directory if necessary if (!created) { if (!store.isDirectory()) { System.out.println("Tried to create the directory " + dir + " but failed"); } } } } else if (argv[i].equals("-zip")) { zip_file = argv[++i]; } else { System.out.println("Unknown option " + argv[i]); } } else { file_name[files++] = argv[i]; } } if (files == 0) { System.err.println("Class2HTML: No input files specified."); } else { // Loop through files ... for (int i = 0; i < files; i++) { System.out.print("Processing " + file_name[i] + "..."); if (zip_file == null) { parser = new ClassParser(file_name[i]); // Create parser object from file } else { parser = new ClassParser(zip_file, file_name[i]); // Create parser object from zip file } java_class = parser.parse(); new Class2HTML(java_class, dir); System.out.println("Done."); } } } /** * Utility method that converts a class reference in the constant pool, * i.e., an index to a string. */ static String referenceClass(final int index) { String str = constant_pool.getConstantString(index, Const.CONSTANT_Class); str = Utility.compactClassName(str); str = Utility.compactClassName(str, class_package + ".", true); return "<A HREF=\"" + class_name + "_cp.html#cp" + index + "\" TARGET=ConstantPool>" + str + "</A>"; } static String referenceType( final String type ) { String short_type = Utility.compactClassName(type); short_type = Utility.compactClassName(short_type, class_package + ".", true); final int index = type.indexOf('['); // Type is an array? 
String base_type = type; if (index > -1) { base_type = type.substring(0, index); // Chop off the '[' } // test for basic type if (basic_types.contains(base_type)) { return "<FONT COLOR=\"#00FF00\">" + type + "</FONT>"; } return "<A HREF=\"" + base_type + ".html\" TARGET=_top>" + short_type + "</A>"; } static String toHTML( final String str ) { final StringBuilder buf = new StringBuilder(); for (int i = 0; i < str.length(); i++) { char ch; switch (ch = str.charAt(i)) { case '<': buf.append("&lt;"); break; case '>': buf.append("&gt;"); break; case '\n': buf.append("\\n"); break; case '\r': buf.append("\\r"); break; default: buf.append(ch); } } return buf.toString(); } private void writeMainHTML( final AttributeHTML attribute_html ) throws IOException { try (PrintWriter file = new PrintWriter(new FileOutputStream(dir + class_name + ".html"))) { file.println("<HTML>\n" + "<HEAD><TITLE>Documentation for " + class_name + "</TITLE>" + "</HEAD>\n" + "<FRAMESET BORDER=1 cols=\"30%,*\">\n" + "<FRAMESET BORDER=1 rows=\"80%,*\">\n" + "<FRAME NAME=\"ConstantPool\" SRC=\"" + class_name + "_cp.html" + "\"\n MARGINWIDTH=\"0\" " + "MARGINHEIGHT=\"0\" FRAMEBORDER=\"1\" SCROLLING=\"AUTO\">\n" + "<FRAME NAME=\"Attributes\" SRC=\"" + class_name + "_attributes.html" + "\"\n MARGINWIDTH=\"0\" " + "MARGINHEIGHT=\"0\" FRAMEBORDER=\"1\" SCROLLING=\"AUTO\">\n" + "</FRAMESET>\n" + "<FRAMESET BORDER=1 rows=\"80%,*\">\n" + "<FRAME NAME=\"Code\" SRC=\"" + class_name + "_code.html\"\n MARGINWIDTH=0 " + "MARGINHEIGHT=0 FRAMEBORDER=1 SCROLLING=\"AUTO\">\n" + "<FRAME NAME=\"Methods\" SRC=\"" + class_name + "_methods.html\"\n MARGINWIDTH=0 " + "MARGINHEIGHT=0 FRAMEBORDER=1 SCROLLING=\"AUTO\">\n" + "</FRAMESET></FRAMESET></HTML>"); } final Attribute[] attributes = java_class.getAttributes(); for (int i = 0; i < attributes.length; i++) { attribute_html.writeAttribute(attributes[i], "class" + i); } } }
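/*
 * A minimal usage sketch: parse a single class file with BCEL's ClassParser
 * and emit the five HTML files, mirroring what main() above does per input
 * file. "Foo.class" and the output directory are illustrative; Class2HTML
 * lives in a JDK-internal package, so this assumes the sketch can access it.
 */
import java.io.File;
import java.io.IOException;
import com.sun.org.apache.bcel.internal.classfile.ClassParser;
import com.sun.org.apache.bcel.internal.classfile.JavaClass;

class Class2HTMLSketch {
    public static void main(final String[] args) throws IOException {
        final JavaClass clazz = new ClassParser("Foo.class").parse();
        // Writes Foo.html plus the _attributes/_cp/_code/_methods subfiles
        // into the current directory (trailing separator expected by Class2HTML).
        new Class2HTML(clazz, "." + File.separatorChar);
    }
}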
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.jvm.java; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleParams; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter; import com.facebook.buck.core.toolchain.ToolchainProvider; import com.facebook.buck.core.util.immutables.BuckStyleValueWithBuilder; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.jvm.common.ResourceValidator; import com.facebook.buck.jvm.core.CalculateAbi; import com.facebook.buck.jvm.core.JavaAbis; import com.facebook.buck.jvm.java.JavaBuckConfig.SourceAbiVerificationMode; import com.facebook.buck.jvm.java.JavaBuckConfig.UnusedDependenciesAction; import com.facebook.buck.jvm.java.JavaBuckConfig.UnusedDependenciesConfig; import com.facebook.buck.jvm.java.JavaLibraryDescription.CoreArg; import com.facebook.buck.jvm.java.abi.AbiGenerationMode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedSet; import java.nio.file.Path; import java.util.Objects; import java.util.Optional; import java.util.SortedSet; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.Nullable; import org.immutables.value.Value; @BuckStyleValueWithBuilder public abstract class DefaultJavaLibraryRules { public interface DefaultJavaLibraryConstructor { DefaultJavaLibrary newInstance( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, JarBuildStepsFactory jarBuildStepsFactory, SourcePathRuleFinder ruleFinder, Optional<SourcePath> proguardConfig, SortedSet<BuildRule> firstOrderPackageableDeps, ImmutableSortedSet<BuildRule> fullJarExportedDeps, ImmutableSortedSet<BuildRule> fullJarProvidedDeps, ImmutableSortedSet<BuildRule> fullJarExportedProvidedDeps, ImmutableSortedSet<BuildRule> runtimeDeps, @Nullable BuildTarget abiJar, @Nullable BuildTarget sourceOnlyAbiJar, Optional<String> mavenCoords, ImmutableSortedSet<BuildTarget> tests, boolean requiredForSourceOnlyAbi, UnusedDependenciesAction unusedDependenciesAction, Optional<UnusedDependenciesFinderFactory> unusedDependenciesFinderFactory, @Nullable CalculateSourceAbi previousRuleInPipeline, boolean isDesugarEnabled, boolean isInterfaceMethodsDesugarEnabled, boolean neverMarkAsUnusedDependency); } @org.immutables.builder.Builder.Parameter abstract BuildTarget getInitialBuildTarget(); @Value.Lazy BuildTarget getLibraryTarget() { BuildTarget initialBuildTarget = getInitialBuildTarget(); return JavaAbis.isLibraryTarget(initialBuildTarget) ? 
initialBuildTarget : JavaAbis.getLibraryTarget(initialBuildTarget); } @org.immutables.builder.Builder.Parameter abstract ProjectFilesystem getProjectFilesystem(); @org.immutables.builder.Builder.Parameter abstract ToolchainProvider getToolchainProvider(); @org.immutables.builder.Builder.Parameter abstract BuildRuleParams getInitialParams(); @org.immutables.builder.Builder.Parameter abstract ActionGraphBuilder getActionGraphBuilder(); @Value.Lazy SourcePathResolverAdapter getSourcePathResolver() { return getActionGraphBuilder().getSourcePathResolver(); } @org.immutables.builder.Builder.Parameter abstract ConfiguredCompilerFactory getConfiguredCompilerFactory(); @org.immutables.builder.Builder.Parameter abstract UnusedDependenciesAction getUnusedDependenciesAction(); @org.immutables.builder.Builder.Parameter @Nullable abstract JavaBuckConfig getJavaBuckConfig(); @Value.Default DefaultJavaLibraryConstructor getConstructor() { return DefaultJavaLibrary::new; } @Value.NaturalOrder abstract ImmutableSortedSet<SourcePath> getSrcs(); @Value.NaturalOrder abstract ImmutableSortedSet<SourcePath> getResources(); @Value.Check void validateResources() { ResourceValidator.validateResources( getSourcePathResolver(), getProjectFilesystem(), getResources()); } abstract Optional<SourcePath> getProguardConfig(); abstract ImmutableList<String> getPostprocessClassesCommands(); abstract Optional<Path> getResourcesRoot(); abstract Optional<SourcePath> getManifestFile(); abstract Optional<String> getMavenCoords(); @Value.NaturalOrder abstract ImmutableSortedSet<BuildTarget> getTests(); @Value.Default RemoveClassesPatternsMatcher getClassesToRemoveFromJar() { return RemoveClassesPatternsMatcher.EMPTY; } @Value.Default boolean getSourceOnlyAbisAllowed() { return true; } abstract JavacOptions getJavacOptions(); @Nullable abstract JavaLibraryDeps getDeps(); @org.immutables.builder.Builder.Parameter @Nullable abstract JavaLibraryDescription.CoreArg getArgs(); public DefaultJavaLibrary buildLibrary() { buildAllRules(); return (DefaultJavaLibrary) getActionGraphBuilder().getRule(getLibraryTarget()); } public BuildRule buildAbi() { buildAllRules(); return getActionGraphBuilder().getRule(getInitialBuildTarget()); } private void buildAllRules() { // To guarantee that all rules in a source-ABI pipeline are working off of the same settings, // we want to create them all from the same instance of this builder. To ensure this, we force // a request for whichever rule is closest to the root of the graph (regardless of which rule // was actually requested) and then create all of the rules inside that request. We're // requesting the rootmost rule because the rules are created from leafmost to rootmost and // we want any requests to block until all of the rules are built. 
BuildTarget rootmostTarget = getLibraryTarget(); if (willProduceCompareAbis()) { rootmostTarget = JavaAbis.getVerifiedSourceAbiJar(rootmostTarget); } else if (willProduceSourceAbiFromLibraryTarget()) { rootmostTarget = JavaAbis.getSourceAbiJar(rootmostTarget); } else if (willProduceClassAbi()) { rootmostTarget = JavaAbis.getClassAbiJar(rootmostTarget); } ActionGraphBuilder graphBuilder = getActionGraphBuilder(); graphBuilder.computeIfAbsent( rootmostTarget, target -> { if (willProduceSourceAbiFromLibraryTarget()) { DefaultJavaLibrary libraryRule = buildLibraryRule(/* sourceAbiRule */ null); CalculateSourceAbiFromLibraryTarget sourceAbiRule = buildSourceAbiRuleFromLibraryTarget(libraryRule); CalculateClassAbi classAbiRule = buildClassAbiRule(libraryRule); if (JavaAbis.isLibraryTarget(target)) { return libraryRule; } else if (JavaAbis.isClassAbiTarget(target)) { return classAbiRule; } else if (JavaAbis.isSourceAbiTarget(target)) { return sourceAbiRule; } } CalculateSourceAbi sourceOnlyAbiRule = buildSourceOnlyAbiRule(); CalculateSourceAbi sourceAbiRule = buildSourceAbiRule(); DefaultJavaLibrary libraryRule = buildLibraryRule(sourceAbiRule); CalculateClassAbi classAbiRule = buildClassAbiRule(libraryRule); CompareAbis compareAbisRule; if (sourceOnlyAbiRule != null) { compareAbisRule = buildCompareAbisRule(sourceAbiRule, sourceOnlyAbiRule); } else { compareAbisRule = buildCompareAbisRule(classAbiRule, sourceAbiRule); } if (JavaAbis.isLibraryTarget(target)) { return libraryRule; } else if (JavaAbis.isClassAbiTarget(target)) { return classAbiRule; } else if (JavaAbis.isVerifiedSourceAbiTarget(target)) { return compareAbisRule; } throw new AssertionError(); }); } @Nullable private <T extends BuildRule & CalculateAbi, U extends BuildRule & CalculateAbi> CompareAbis buildCompareAbisRule(@Nullable T correctAbi, @Nullable U experimentalAbi) { if (!willProduceCompareAbis()) { return null; } Objects.requireNonNull(correctAbi); Objects.requireNonNull(experimentalAbi); BuildTarget compareAbisTarget = JavaAbis.getVerifiedSourceAbiJar(getLibraryTarget()); return getActionGraphBuilder() .addToIndex( new CompareAbis( compareAbisTarget, getProjectFilesystem(), getInitialParams() .withDeclaredDeps(ImmutableSortedSet.of(correctAbi, experimentalAbi)) .withoutExtraDeps(), correctAbi.getSourcePathToOutput(), experimentalAbi.getSourcePathToOutput(), Objects.requireNonNull(getJavaBuckConfig()).getSourceAbiVerificationMode())); } @Value.Lazy @Nullable BuildTarget getAbiJar() { if (willProduceCompareAbis()) { return JavaAbis.getVerifiedSourceAbiJar(getLibraryTarget()); } else if (willProduceSourceAbi()) { return JavaAbis.getSourceAbiJar(getLibraryTarget()); } else if (willProduceClassAbi()) { return JavaAbis.getClassAbiJar(getLibraryTarget()); } return null; } @Value.Lazy @Nullable BuildTarget getSourceOnlyAbiJar() { if (willProduceSourceOnlyAbi()) { return JavaAbis.getSourceOnlyAbiJar(getLibraryTarget()); } return null; } private boolean willProduceAbiJar() { return !getSrcs().isEmpty() || !getResources().isEmpty() || getManifestFile().isPresent(); } // regex pattern to extract java version from both "7" and "1.7" notations. 
private static final Pattern JAVA_VERSION_PATTERN = Pattern.compile("^(1\\.)*(?<version>\\d)$"); private boolean isDesugarRequired() { String sourceLevel = getJavacOptions().getLanguageLevelOptions().getSourceLevel(); Matcher matcher = JAVA_VERSION_PATTERN.matcher(sourceLevel); if (!matcher.find()) { return false; } int version = Integer.parseInt(matcher.group("version")); // Currently only java 8+ requires desugaring on Android return version > 7; } @Value.Lazy AbiGenerationMode getAbiGenerationMode() { AbiGenerationMode result = null; CoreArg args = getArgs(); if (args != null) { result = args.getAbiGenerationMode().orElse(null); } // Respect user input if provided if (result != null) { return result; } // Infer ABI generation mode based on properties of build target result = Objects.requireNonNull(getConfiguredCompilerFactory()).getAbiGenerationMode(); if (result == AbiGenerationMode.CLASS) { return result; } if (!shouldBuildSourceAbi()) { return AbiGenerationMode.CLASS; } if (result != AbiGenerationMode.SOURCE && (!getSourceOnlyAbisAllowed() || !pluginsSupportSourceOnlyAbis())) { return AbiGenerationMode.SOURCE; } if (result == AbiGenerationMode.MIGRATING_TO_SOURCE_ONLY && !getConfiguredCompilerFactory().shouldMigrateToSourceOnlyAbi()) { return AbiGenerationMode.SOURCE; } if (result == AbiGenerationMode.SOURCE_ONLY && !getConfiguredCompilerFactory().shouldGenerateSourceOnlyAbi()) { return AbiGenerationMode.SOURCE; } return result; } @Value.Lazy SourceAbiVerificationMode getSourceAbiVerificationMode() { JavaBuckConfig javaBuckConfig = getJavaBuckConfig(); CoreArg args = getArgs(); SourceAbiVerificationMode result = null; if (args != null) { result = args.getSourceAbiVerificationMode().orElse(null); } if (result == null) { result = javaBuckConfig != null ? 
javaBuckConfig.getSourceAbiVerificationMode() : SourceAbiVerificationMode.OFF; } return result; } private boolean willProduceSourceAbiFromLibraryTarget() { return willProduceSourceAbi() && getConfiguredCompilerFactory().sourceAbiCopiesFromLibraryTargetOutput(); } private boolean willProduceSourceAbi() { return willProduceAbiJar() && getAbiGenerationMode().isSourceAbi(); } private boolean willProduceSourceOnlyAbi() { return willProduceSourceAbi() && !getAbiGenerationMode().usesDependencies(); } private boolean willProduceClassAbi() { return willProduceAbiJar() && (!willProduceSourceAbi() || willProduceCompareAbis()); } private boolean willProduceCompareAbis() { return willProduceSourceAbi() && getSourceAbiVerificationMode() != JavaBuckConfig.SourceAbiVerificationMode.OFF; } private boolean shouldBuildSourceAbi() { return getConfiguredCompilerFactory().shouldGenerateSourceAbi() && !getSrcs().isEmpty() && getPostprocessClassesCommands().isEmpty(); } private boolean pluginsSupportSourceOnlyAbis() { ImmutableList<ResolvedJavacPluginProperties> annotationProcessors = Objects.requireNonNull(getJavacOptions()) .getJavaAnnotationProcessorParams() .getPluginProperties(); for (ResolvedJavacPluginProperties annotationProcessor : annotationProcessors) { if (!annotationProcessor.getDoesNotAffectAbi() && !annotationProcessor.getSupportAbiGenerationFromSource()) { // Processor is ABI-affecting but cannot run during ABI generation from source; disallow return false; } } return true; } private DefaultJavaLibrary buildLibraryRule(@Nullable CalculateSourceAbi sourceAbiRule) { DefaultJavaLibraryClasspaths classpaths = getClasspaths(); UnusedDependenciesAction unusedDependenciesAction = getUnusedDependenciesAction(); Optional<UnusedDependenciesFinderFactory> unusedDependenciesFinderFactory = Optional.empty(); if (unusedDependenciesAction != UnusedDependenciesAction.IGNORE && getConfiguredCompilerFactory().trackClassUsage(getJavacOptions())) { BuildRuleResolver buildRuleResolver = getActionGraphBuilder(); unusedDependenciesFinderFactory = Optional.of( new UnusedDependenciesFinderFactory( Objects.requireNonNull(getJavaBuckConfig()) .getUnusedDependenciesBuildozerString(), Objects.requireNonNull(getJavaBuckConfig()) .isUnusedDependenciesOnlyPrintCommands(), UnusedDependenciesFinder.getDependencies( buildRuleResolver, buildRuleResolver.getAllRules( Objects.requireNonNull(getDeps()).getDepTargets())), UnusedDependenciesFinder.getDependencies( buildRuleResolver, buildRuleResolver.getAllRules( Objects.requireNonNull(getDeps()).getProvidedDepTargets())))); } DefaultJavaLibrary libraryRule = getConstructor() .newInstance( getLibraryTarget(), getProjectFilesystem(), getJarBuildStepsFactory(), getActionGraphBuilder(), getProguardConfig(), classpaths.getFirstOrderPackageableDeps(), Objects.requireNonNull(getDeps()).getExportedDeps(), Objects.requireNonNull(getDeps()).getProvidedDeps(), Objects.requireNonNull(getDeps()).getExportedProvidedDeps(), Objects.requireNonNull(getDeps()).getRuntimeDeps(), getAbiJar(), getSourceOnlyAbiJar(), getMavenCoords(), getTests(), getRequiredForSourceOnlyAbi(), unusedDependenciesAction, unusedDependenciesFinderFactory, sourceAbiRule, isDesugarRequired(), getConfiguredCompilerFactory().shouldDesugarInterfaceMethods(), getArgs() != null && getArgs().getNeverMarkAsUnusedDependency().orElse(false)); getActionGraphBuilder().addToIndex(libraryRule); return libraryRule; } private boolean getRequiredForSourceOnlyAbi() { return getArgs() != null && getArgs().getRequiredForSourceOnlyAbi(); } @Nullable 
private CalculateSourceAbi buildSourceOnlyAbiRule() { if (!willProduceSourceOnlyAbi()) { return null; } JarBuildStepsFactory jarBuildStepsFactory = getJarBuildStepsFactoryForSourceOnlyAbi(); BuildTarget sourceAbiTarget = JavaAbis.getSourceOnlyAbiJar(getLibraryTarget()); return getActionGraphBuilder() .addToIndex( new CalculateSourceAbi( sourceAbiTarget, getProjectFilesystem(), jarBuildStepsFactory, getActionGraphBuilder())); } @Nullable private CalculateSourceAbi buildSourceAbiRule() { if (!willProduceSourceAbi()) { return null; } JarBuildStepsFactory jarBuildStepsFactory = getJarBuildStepsFactory(); BuildTarget sourceAbiTarget = JavaAbis.getSourceAbiJar(getLibraryTarget()); return getActionGraphBuilder() .addToIndex( new CalculateSourceAbi( sourceAbiTarget, getProjectFilesystem(), jarBuildStepsFactory, getActionGraphBuilder())); } @Nullable private CalculateSourceAbiFromLibraryTarget buildSourceAbiRuleFromLibraryTarget( DefaultJavaLibrary libraryRule) { if (!willProduceSourceAbi()) { return null; } BuildTarget sourceAbiTarget = JavaAbis.getSourceAbiJar(getLibraryTarget()); return getActionGraphBuilder() .addToIndex( new CalculateSourceAbiFromLibraryTarget( libraryRule.getSourcePathToOutput(), sourceAbiTarget, getProjectFilesystem(), getActionGraphBuilder())); } @Nullable private CalculateClassAbi buildClassAbiRule(DefaultJavaLibrary libraryRule) { if (!willProduceClassAbi()) { return null; } BuildTarget classAbiTarget = JavaAbis.getClassAbiJar(getLibraryTarget()); return getActionGraphBuilder() .addToIndex( CalculateClassAbi.of( classAbiTarget, getActionGraphBuilder(), getProjectFilesystem(), libraryRule.getSourcePathToOutput(), getAbiCompatibilityMode())); } @Value.Lazy AbiGenerationMode getAbiCompatibilityMode() { return getJavaBuckConfig() == null || getJavaBuckConfig().getSourceAbiVerificationMode() == SourceAbiVerificationMode.OFF ? 
AbiGenerationMode.CLASS // Use the BuckConfig version (rather than the inferred one) because if any // targets are using source_only it can affect the output of other targets // in ways that are hard to simulate : getConfiguredCompilerFactory().getAbiGenerationMode(); } @Value.Lazy DefaultJavaLibraryClasspaths getClasspaths() { return ImmutableDefaultJavaLibraryClasspaths.builder(getActionGraphBuilder()) .setBuildRuleParams(getInitialParams()) .setDeps(Objects.requireNonNull(getDeps())) .setCompileAgainstLibraryType(getCompileAgainstLibraryType()) .build(); } @Value.Lazy DefaultJavaLibraryClasspaths getClasspathsForSourceOnlyAbi() { return getClasspaths().getSourceOnlyAbiClasspaths(); } @Value.Lazy CompileToJarStepFactory getConfiguredCompiler() { return getConfiguredCompilerFactory() .configure( getArgs(), getJavacOptions(), getActionGraphBuilder(), getInitialBuildTarget().getTargetConfiguration(), getToolchainProvider()); } @Value.Lazy CompileToJarStepFactory getConfiguredCompilerForSourceOnlyAbi() { return getConfiguredCompilerFactory() .configure( getArgs(), getJavacOptionsForSourceOnlyAbi(), getActionGraphBuilder(), getInitialBuildTarget().getTargetConfiguration(), getToolchainProvider()); } @Value.Lazy JavacOptions getJavacOptionsForSourceOnlyAbi() { JavacOptions javacOptions = getJavacOptions(); return javacOptions.withJavaAnnotationProcessorParams( abiProcessorsOnly(javacOptions.getJavaAnnotationProcessorParams())); } private JavacPluginParams abiProcessorsOnly(JavacPluginParams annotationProcessingParams) { return annotationProcessingParams.withAbiProcessorsOnly(); } @Value.Lazy CompileAgainstLibraryType getCompileAgainstLibraryType() { CoreArg args = getArgs(); CompileAgainstLibraryType result = CompileAgainstLibraryType.SOURCE_ONLY_ABI; if (args != null) { result = args.getCompileAgainst().orElse(result); } if (!getConfiguredCompilerFactory().shouldCompileAgainstAbis()) { result = CompileAgainstLibraryType.FULL; } return result; } @Value.Lazy JarBuildStepsFactory getJarBuildStepsFactory() { DefaultJavaLibraryClasspaths classpaths = getClasspaths(); return new JarBuildStepsFactory( getLibraryTarget(), getConfiguredCompiler(), getSrcs(), getResources(), getResourcesParameters(), getManifestFile(), getPostprocessClassesCommands(), getConfiguredCompilerFactory().trackClassUsage(getJavacOptions()), getJavacOptions().trackJavacPhaseEvents(), getClassesToRemoveFromJar(), getAbiGenerationMode(), getAbiCompatibilityMode(), classpaths.getDependencyInfos(), getRequiredForSourceOnlyAbi()); } @Value.Lazy JarBuildStepsFactory getJarBuildStepsFactoryForSourceOnlyAbi() { DefaultJavaLibraryClasspaths classpaths = getClasspathsForSourceOnlyAbi(); return new JarBuildStepsFactory( getLibraryTarget(), getConfiguredCompilerForSourceOnlyAbi(), getSrcs(), getResources(), getResourcesParameters(), getManifestFile(), getPostprocessClassesCommands(), getConfiguredCompilerFactory().trackClassUsage(getJavacOptions()), getJavacOptions().trackJavacPhaseEvents(), getClassesToRemoveFromJar(), getAbiGenerationMode(), getAbiCompatibilityMode(), classpaths.getDependencyInfos(), getRequiredForSourceOnlyAbi()); } private ResourcesParameters getResourcesParameters() { return ResourcesParameters.create( getProjectFilesystem(), getActionGraphBuilder(), getResources(), getResourcesRoot()); } /** * This is a little complicated, but goes along the lines of: 1. If the buck config value is * "ignore_always", then ignore. 2. If the buck config value is "warn_if_fail", then downgrade a * local "fail" to "warn". 3. 
Use the local action if available. 4. Use the buck config value if * available. 5. Default to ignore. */ private static UnusedDependenciesAction getUnusedDependenciesAction( @Nullable JavaBuckConfig javaBuckConfig, @Nullable JavaLibraryDescription.CoreArg args) { UnusedDependenciesAction localAction = args == null ? null : args.getOnUnusedDependencies().orElse(null); UnusedDependenciesConfig configAction = javaBuckConfig == null ? null : javaBuckConfig.getUnusedDependenciesAction(); if (configAction == UnusedDependenciesConfig.IGNORE_ALWAYS) { return UnusedDependenciesAction.IGNORE; } if (configAction == UnusedDependenciesConfig.WARN_IF_FAIL && localAction == UnusedDependenciesAction.FAIL) { return UnusedDependenciesAction.WARN; } if (localAction != null) { return localAction; } if (configAction == UnusedDependenciesConfig.FAIL) { return UnusedDependenciesAction.FAIL; } else if (configAction == UnusedDependenciesConfig.WARN) { return UnusedDependenciesAction.WARN; } else { return UnusedDependenciesAction.IGNORE; } } @org.immutables.builder.Builder.AccessibleFields public static class Builder extends ImmutableDefaultJavaLibraryRules.Builder { public Builder( BuildTarget initialBuildTarget, ProjectFilesystem projectFilesystem, ToolchainProvider toolchainProvider, BuildRuleParams initialParams, ActionGraphBuilder graphBuilder, ConfiguredCompilerFactory configuredCompilerFactory, @Nullable JavaBuckConfig javaBuckConfig, @Nullable JavaLibraryDescription.CoreArg args) { super( initialBuildTarget, projectFilesystem, toolchainProvider, initialParams, graphBuilder, configuredCompilerFactory, getUnusedDependenciesAction(javaBuckConfig, args), javaBuckConfig, args); this.actionGraphBuilder = graphBuilder; if (args != null) { setSrcs(args.getSrcs()) .setResources(args.getResources()) .setResourcesRoot(args.getResourcesRoot()) .setProguardConfig(args.getProguardConfig()) .setPostprocessClassesCommands(args.getPostprocessClassesCommands()) .setDeps( JavaLibraryDeps.newInstance( args, graphBuilder, initialBuildTarget.getTargetConfiguration(), configuredCompilerFactory)) .setTests(args.getTests()) .setManifestFile(args.getManifestFile()) .setMavenCoords(args.getMavenCoords()) .setClassesToRemoveFromJar(new RemoveClassesPatternsMatcher(args.getRemoveClasses())); } } Builder() { throw new UnsupportedOperationException(); } @Nullable public JavaLibraryDeps getDeps() { return deps; } } }
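/*
 * A few worked examples (hypothetical configs) of the precedence implemented
 * by getUnusedDependenciesAction(javaBuckConfig, args) above, where "config"
 * is the .buckconfig value and "target" is the per-rule value:
 *
 *   config = IGNORE_ALWAYS, target = FAIL   -> IGNORE  (rule 1 always wins)
 *   config = WARN_IF_FAIL,  target = FAIL   -> WARN    (rule 2 downgrades fail)
 *   config = FAIL,          target = WARN   -> WARN    (rule 3: local action wins)
 *   config = FAIL,          target = unset  -> FAIL    (rule 4: config applies)
 *   config = unset,         target = unset  -> IGNORE  (rule 5: default)
 */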
package org.lantern.proxy.pt; import java.io.IOException; import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import org.apache.http.HttpResponse; import org.apache.http.client.fluent.Form; import org.apache.http.client.fluent.Request; import org.apache.http.client.fluent.Response; import org.apache.http.util.EntityUtils; import org.lantern.ConnectivityChangedEvent; import org.lantern.LanternClientConstants; import org.lantern.LanternUtils; import org.lantern.Messages; import org.lantern.Shutdownable; import org.lantern.Tr; import org.lantern.event.Events; import org.lantern.event.ModeChangedEvent; import org.lantern.state.Mode; import org.lantern.state.Model; import org.lantern.state.Notification.MessageType; import org.lantern.util.GatewayUtil; import org.lantern.util.Threads; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class FlashlightServerManager implements Shutdownable { private static final Logger LOGGER = LoggerFactory .getLogger(FlashlightServerManager.class); /** * Use internal port 443 plus myleshorton's lucky number 77. */ private static final int PREFERRED_FLASHLIGHT_INTERNAL_PORT = 44377; private static final int FLASHLIGHT_EXTERNAL_PORT = 443; private static final long HEARTBEAT_PERIOD_MINUTES = 2; private final Model model; private final Messages msgs; private volatile Flashlight flashlight; private volatile ScheduledExecutorService heartbeat; private final AtomicBoolean needPortMappingWarning = new AtomicBoolean(true); private final AtomicBoolean connectivityCheckFailing = new AtomicBoolean(); /** * The last time a mapping succeeded. */ private long lastSuccessfulMapping = 0L; @Inject public FlashlightServerManager( Model model, Messages messages) { LOGGER.info("Starting up..."); this.model = model; this.msgs = messages; Events.register(this); } @Subscribe public void onModeChanged(ModeChangedEvent event) { boolean inGiveMode = event.getNewMode() == Mode.give; boolean isConnected = model.getConnectivity().isInternet(); update(inGiveMode, isConnected); if (!inGiveMode) { hidePortMappingSuccess(); hidePortMappingWarning(); } } @Subscribe public void onConnectivityChanged( final ConnectivityChangedEvent event) { boolean inGiveMode = LanternUtils.isGive(); boolean isConnected = event.isConnected(); update(inGiveMode, isConnected); } @Override synchronized public void stop() { LOGGER.debug("Flashlight manager closing."); stopFlashlight(model.getConnectivity().isInternet()); } synchronized private void update(boolean inGiveMode, boolean isConnected) { boolean eligibleToRun = inGiveMode && isConnected; boolean running = flashlight != null; needPortMappingWarning.set(true); if (eligibleToRun && !running) { connectivityCheckFailing.set(false); startFlashlight(); } else if (!eligibleToRun && running) { stopFlashlight(isConnected); } } private void startFlashlight() { LOGGER.debug("Starting flashlight"); heartbeat = Threads .newSingleThreadScheduledExecutor("FlashlightServerManager-Heartbeat"); heartbeat.scheduleAtFixedRate(peerRegistrar, 10, HEARTBEAT_PERIOD_MINUTES * 60, TimeUnit.SECONDS); try { // Note if this call succeeds it blocks indefinitely. 
runFlashlight(true); } catch (RuntimeException re) { final String msg = re.getMessage(); if (msg != null && msg.contains("Exit value: 50")) { LOGGER.info("Unable to start flashlight with automatically mapped external port, try without mapping"); // Note if this call succeeds it blocks indefinitely. runFlashlight(false); } else { LOGGER.error("Unexpected runtime exception", re); throw re; } } } private void stopFlashlight(boolean unregister) { LOGGER.debug("Stopping flashlight"); if (unregister) { unregisterPeer(); } if (heartbeat != null) { heartbeat.shutdownNow(); } if (flashlight != null) { flashlight.stopServer(); } heartbeat = null; flashlight = null; } private void runFlashlight(boolean mapExternalPort) { Properties props = new Properties(); String instanceId = model.getInstanceId(); props.setProperty( Flashlight.SERVER_KEY, instanceId + ".getiantem.org"); String externalPort = "0"; if (mapExternalPort) { externalPort = Integer.toString(FLASHLIGHT_EXTERNAL_PORT); } props.setProperty(Flashlight.PORTMAP_KEY, externalPort); LOGGER.debug("Props: {}", props); flashlight = new Flashlight(props); int localPort = LanternUtils .findFreePort(PREFERRED_FLASHLIGHT_INTERNAL_PORT); flashlight.startServer(localPort, null); LOGGER.info("Finished starting server..."); } private Runnable peerRegistrar = new Runnable() { @Override public void run() { boolean externallyAccessible = registerPeer(); if (externallyAccessible) { LOGGER.debug("Confirmed able to proxy for external clients!"); hidePortMappingWarning(); needPortMappingWarning.set(true); lastSuccessfulMapping = System.currentTimeMillis(); if (connectivityCheckFailing.getAndSet(false)) { showPortMappingSuccess(); } } else { LOGGER.info("Unable to proxy for external clients!"); connectivityCheckFailing.set(true); hidePortMappingSuccess(); if (needPortMappingWarning.getAndSet(false) && shouldShowPortMappingFailure()) { showPortMappingWarning(); } unregisterPeer(); } } }; /** * Only show the failure message if the last successful mapping was * sufficiently old. * * @return <code>true</code> if we should show the mapping failure, * otherwise <code>false</code> */ private boolean shouldShowPortMappingFailure() { return System.currentTimeMillis() - lastSuccessfulMapping > 5 * DateUtils.MILLIS_PER_MINUTE; } private boolean registerPeer() { LOGGER.info("Registering peer..."); Response response = null; try { response = Request .Post( "https://" + model.getS3Config().getDnsRegUrl()+"/register") .bodyForm( Form.form() .add("name", model.getInstanceId()) .add("port", "" + FLASHLIGHT_EXTERNAL_PORT) // Note - the below is only used for testing locally // The production dns registration service determines // the IP based on the network client/X-Forwarded-For // header. // model.getConnectivity().getIp() may actually return // null here since we may or may not have obtained a // public IP at this point.
.add("ip", model.getConnectivity().getIp()) .add("v", LanternClientConstants.VERSION) .build()) .connectTimeout(100 * 1000) .socketTimeout(100 * 1000) .execute(); HttpResponse httpResponse = response.returnResponse(); if (httpResponse.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { LOGGER.info("Registered peer"); return true; } LOGGER.error("Unable to register peer: {}", EntityUtils.toString(httpResponse.getEntity())); } catch (IOException e) { LOGGER.error("Exception trying to register peer", e); } finally { if (response != null) { response.discardContent(); } } return false; } private void unregisterPeer() { Response response = null; try { response = Request .Post("https://" + model.getS3Config().getDnsRegUrl() + "/unregister") .bodyForm( Form.form().add("name", model.getInstanceId()) .build()) .connectTimeout(100 * 1000) .socketTimeout(100 * 1000) .execute(); if (response.returnResponse().getStatusLine().getStatusCode() != HttpStatus.SC_OK) { LOGGER.error("Unable to unregister peer: {}", response .returnContent().asString()); } else { LOGGER.debug("Unregistered peer"); } } catch (IOException e) { LOGGER.error("Exception trying to unregister peer: " + e); } finally { if (response != null) { response.discardContent(); } } } private void showPortMappingWarning() { try { // Make sure there actually is an accessible gateway // screen before prompting the user to connect to it. final String gateway = GatewayUtil.defaultGateway(); if (StringUtils.isNotBlank(gateway)) { msgs.msg(Tr.tr("BACKEND_MANUAL_NETWORK_PROMPT"), MessageType.error, 0, true); } } catch (IOException e) { LOGGER.debug("Gateway may not exist", e); } catch (InterruptedException e) { LOGGER.debug("Gateway may not exist", e); } } private void showPortMappingSuccess() { msgs.msg(Tr.tr("BACKEND_MANUAL_NETWORK_SUCCESS"), MessageType.success, 0, false); } private void hidePortMappingWarning() { msgs.closeMsg(Tr.tr("BACKEND_MANUAL_NETWORK_PROMPT"), MessageType.error); } private void hidePortMappingSuccess() { msgs.closeMsg(Tr.tr("BACKEND_MANUAL_NETWORK_SUCCESS"), MessageType.error); } }
import java.util.Arrays; public class Denoiser implements AudioProcessor { private static int windowLength; private static double overlapRatio; private int fs; private double noSpeechDuration; private int noSpeechSegments; private boolean speechFlag; private boolean noiseFlag; private int noiseCounter; private int noiseLength; private int noiseThreshold; private int frameReset; public Denoiser(int fs) { this(fs, 0.4); } public Denoiser(int fs, double noSpeechDuration) { this(fs, noSpeechDuration, 9, 3, 8); } public Denoiser(int fs, double noSpeechDuration, int noiseLength, int noiseThreshold, int frameReset) { windowLength = 256; overlapRatio = 0.5; this.fs = fs; this.noSpeechDuration = noSpeechDuration; this.noSpeechSegments = (int)Math.floor((noSpeechDuration * fs - windowLength) / (overlapRatio * windowLength) + 1); this.speechFlag = false; this.noiseFlag = false; this.noiseLength = noiseLength; this.noiseThreshold = noiseThreshold; this.frameReset = frameReset; } /** * Process function for multi-channel inputs * @param input Multi channel signal * @return enhanced Multi channel enhanced signal */ public double[][] process(double[][] input) { int channels = input.length; int signalLength = input[0].length; double[][] enhanced = new double[channels][signalLength]; for (int i = 0; i < channels; i++) { enhanced[i] = process(input[i]); } return enhanced; } /** * Performs speech denoising on array of doubles based on Speech Enhancement Using a Minimum Mean-Square * Error Short-Time Spectral Amplitude Estimator by Ephraim and Malah * @param input Double array of signal values * @return enhanced Double array of enhanced signal array */ public double[] process(double[] input) { double[][] sampledSignalWindowed = segmentSignal(input, windowLength, overlapRatio); int frames = sampledSignalWindowed[0].length; ComplexNumber[][] sampledSignalWindowedComplex = new ComplexNumber[frames][windowLength]; ComplexNumber[][] signalFFT = new ComplexNumber[frames][windowLength]; double[][] signalFFTMagnitude = new double[frames][windowLength]; double[][] signalFFTPhase = new double[frames][windowLength]; for (int i = 0; i < frames; i++) { for (int k = 0; k < windowLength; k++) { sampledSignalWindowedComplex[i][k] = new ComplexNumber(sampledSignalWindowed[k][i]); //convert samples to Complex form for fft and perform transpose } } for (int i = 0; i < frames; i++) { signalFFT[i] = Utils.fft(sampledSignalWindowedComplex[i]); } for (int i = 0; i < frames; i++) { for (int k = 0; k < windowLength; k++) { signalFFTMagnitude[i][k] = signalFFT[i][k].mod(); signalFFTPhase[i][k] = signalFFT[i][k].getArg(); } } double[][] noise = new double[this.noSpeechSegments][windowLength]; double[][] noiseMag = new double[this.noSpeechSegments][windowLength]; noise = Arrays.copyOfRange(signalFFTMagnitude, 0, this.noSpeechSegments); for (int i = 0; i < this.noSpeechSegments; i++) { for (int k = 0; k < windowLength; k++) { noiseMag[i][k] =
Math.pow(noise[i][k], 2); } } double[] noiseMean = Utils.mean(noise, 0); double[] noiseVar = Utils.mean(noiseMag, 0); double gamma1p5 = Utils.gamma(1.5); double[] gain = new double[windowLength]; double[] gamma = new double[windowLength]; double[] gammaUpdate = new double[windowLength]; double[] xi = new double[windowLength]; double[] nu = new double[windowLength]; double alpha = 0.96; //Smoothing factor Arrays.fill(gain, 1); Arrays.fill(gamma, 1); double[][] enhancedSpectrum = new double[frames][windowLength]; for (int i = 0; i < frames; i++) { if (i < this.noSpeechSegments) { this.speechFlag = false; this.noiseCounter = 100; } else { vad(signalFFTMagnitude[i], noiseMean); } if (!this.speechFlag) { // Noise estimate update during segments with no speech for (int k = 0; k < windowLength; k++) { noiseMean[k] = (this.noiseLength * noiseMean[k] + signalFFTMagnitude[i][k]) / (this.noiseLength + 1); noiseVar[k] = (this.noiseLength * noiseVar[k] + Math.pow(signalFFTMagnitude[i][k], 2)) / (this.noiseLength + 1); } } for (int k = 0; k < windowLength; k++) { gammaUpdate[k] = Math.pow(signalFFTMagnitude[i][k], 2) / noiseVar[k]; xi[k] = alpha * Math.pow(gain[k], 2) * gamma[k] + (1 - alpha) * Math.max(gammaUpdate[k] - 1, 0); gamma[k] = gammaUpdate[k]; nu[k] = gamma[k] * xi[k] / (xi[k] + 1); gain[k] = (gamma1p5 * Math.sqrt(nu[k])) / gamma[k] * Math.exp(-1 * nu[k] / 2) * ((1 + nu[k]) * Bessel.modBesselFirstZero(nu[k] / 2) + nu[k] * Bessel.modBesselFirstOne(nu[k] / 2)); if (Double.isNaN(gain[k]) || Double.isInfinite(gain[k])) { gain[k] = xi[k] / (xi[k] + 1); } enhancedSpectrum[i][k] = gain[k] * signalFFTMagnitude[i][k]; } } ComplexNumber[][] enhancedSpectrumComplex = new ComplexNumber[frames][windowLength]; for (int i = 0; i < frames; i++) { for (int k = 0; k < windowLength; k++) { enhancedSpectrumComplex[i][k] = ComplexNumber.exp(new ComplexNumber(0, signalFFTPhase[i][k])); enhancedSpectrumComplex[i][k] = enhancedSpectrumComplex[i][k].times(enhancedSpectrum[i][k]); } } ComplexNumber[][] enhancedSegments = new ComplexNumber[frames][windowLength]; double[][] enhancedSegmentsReal = new double[windowLength][frames]; for (int i = 0; i < frames; i++) { enhancedSegments[i] = Utils.ifft(enhancedSpectrumComplex[i]); } for (int i = 0; i < frames; i++) { for (int k = 0; k < windowLength; k++) { enhancedSegmentsReal[k][i] = enhancedSegments[i][k].getRe(); //convert samples to real form and perform transpose } } double[] enhanced = overlapAndAdd(enhancedSegmentsReal, overlapRatio); return enhanced; } /** * Voice activity detector that predicts whether the current frame contains speech or not * @param frame Current frame * @param noise Current noise estimate */ private void vad(double[] frame, double[] noise) { double[] spectralDifference = new double[windowLength]; for (int i = 0; i < windowLength; i++) { spectralDifference[i] = 20 * (Math.log10(frame[i]) - Math.log10(noise[i])); if (spectralDifference[i] < 0) { spectralDifference[i] = 0; } } double diff = Utils.mean(spectralDifference); if (diff < this.noiseThreshold) { this.noiseFlag = true; this.noiseCounter++; } else { this.noiseFlag = false; this.noiseCounter = 0; } if (this.noiseCounter > this.frameReset) { this.speechFlag = false; } else { this.speechFlag = true; } } /** * Windows sampled signal using overlapping Hamming windows * @param ss The sampled signal * @param ww The window width
* @param or The overlap ratio * @return seg The overlapping windowed segments */ private double[][] segmentSignal(double[] ss, int ww, double or) { int len = ss.length; double d = 1 - or; int frames = (int)(Math.floor(len - ww) / ww / d); int start = 0; int stop = 0; double[] window = Utils.hamming(ww); double[][] seg = new double[ww][frames]; for (int i = 0; i < frames; i++) { start = (int)(i * ww * or); stop = start + ww; for (int k = 0; k < ww; k++) { seg[k][i] = ss[start + k] * window[k]; } } return seg; } /** * Overlap and add segments to calculate reconstructed signal * @param segments 2D array of overlapping signal segments * @param or overlap ratio * @return reconstructedSignal Speech signal post speech denoising */ private double[] overlapAndAdd(double[][] segments, double or) { int ww = segments.length; int frames = segments[0].length; int start = 0; int stop = 0; int signalLength = (int)(ww * (1 - or) * (frames - 1) + ww); double[] reconstructedSignal = new double[signalLength]; for (int i = 0; i < frames; i++) { start = (int)(i * ww * or); stop = start + ww; for (int k = 0; k < ww; k++) { reconstructedSignal[start + k] = reconstructedSignal[start + k] + segments[k][i]; } } return reconstructedSignal; } public static void main(String[] args) { } }
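/*
 * A hypothetical usage sketch for the Denoiser above. It assumes the
 * companion classes referenced by Denoiser (AudioProcessor, Utils, Bessel,
 * ComplexNumber) are available in the same package, and that the first
 * noSpeechDuration seconds of the input (0.4 s by default) contain noise
 * only, since that prefix seeds the noise estimate.
 */
class DenoiserUsageSketch {
    public static void main(String[] args) {
        int fs = 8000;                       // sample rate in Hz
        double[] noisy = new double[fs * 2]; // two seconds of audio
        java.util.Random rng = new java.util.Random(42);
        for (int n = 0; n < noisy.length; n++) {
            double noise = 0.1 * rng.nextGaussian();
            // Speech-like tone only after the 0.4 s noise-only prefix.
            double tone = n >= (int) (0.4 * fs)
                    ? 0.8 * Math.sin(2 * Math.PI * 440 * n / fs)
                    : 0.0;
            noisy[n] = tone + noise;
        }
        Denoiser denoiser = new Denoiser(fs); // defaults: 256-sample window, 50% overlap
        double[] enhanced = denoiser.process(noisy);
        // The reconstructed signal is slightly shorter than the input
        // because only whole windows are processed.
        System.out.println("in: " + noisy.length + " samples, out: " + enhanced.length);
    }
}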
package com.softstart.VectorImage; import java.awt.*; import java.awt.geom.*; import java.awt.image.ColorModel; import javax.swing.Icon; /** * An encapsulation of the Swing/AWT 2D drawing system. This class represents * a single image (like an icon or picture), but consisting of vector image * elements (like a line drawing). The idea is this "icon" will be drawn * repeatedly on a 2D drawing surface, at different sizes and with different colors. * * This class is not abstract because the image is (or should be) created in * the constructor, rather than created during the drawing process. * * @version $Id$ * @author Thomas Jones-Low */ public class VectorImage implements Paint, Stroke, Shape, Icon { BasicStroke stroke; Color color; Shape shape; Shape transformedShape; boolean fillShape; public VectorImage () { fillShape = false; } public VectorImage(Shape newShape, Color newColor, BasicStroke newStroke) { shape = newShape; transformedShape = newShape; color = newColor; stroke = newStroke; fillShape = false; } public void drawImage(Graphics g) { drawImage((Graphics2D)g); } public void drawImage(Graphics2D g2) { Paint oldPaint; Stroke oldStroke; oldPaint = g2.getPaint(); oldStroke = g2.getStroke(); g2.setPaint(color); g2.setStroke(stroke); g2.draw(transformedShape); if (fillShape) g2.fill(transformedShape); g2.setPaint(oldPaint); g2.setStroke(oldStroke); } /** * From the Paint Interface: * Creates and returns a PaintContext used to generate the color pattern. * Since the ColorModel argument to createContext is only a hint, * implementations of Paint should accept a null argument for ColorModel. * Note that if the application does not prefer a specific ColorModel, the * null ColorModel argument will give the Paint implementation full leeway * in using the most efficient ColorModel it prefers for its raster * processing. * * Since the API documentation was not specific about this in releases * before 1.4, there may be implementations of Paint that do not accept a * null ColorModel argument. If a developer is writing code which passes a * null ColorModel argument to the createContext method of Paint objects * from arbitrary sources it would be wise to code defensively by * manufacturing a non-null ColorModel for those objects which throw a * NullPointerException. */ public PaintContext createContext(ColorModel cm, Rectangle deviceBounds, Rectangle2D userBounds, AffineTransform xform, RenderingHints hints) { return color.createContext(cm, deviceBounds, userBounds, xform, hints); } /** * From the Transparency interface * Returns the type of this Transparency. * @return the field type of this Transparency, which is either * OPAQUE, BITMASK or TRANSLUCENT. */ public int getTransparency() { return color.getTransparency(); } /** * Returns an outline Shape which encloses the area that should be painted * when the Shape is stroked according to the rules defined by the object * implementing the Stroke interface. * @param p - a shape to be stroked * @return the stroked outline Shape. */ public Shape createStrokedShape(Shape p) { return stroke.createStrokedShape(p); } /** * Returns an integer Rectangle that completely encloses the Shape. Note * that there is no guarantee that the returned Rectangle is the smallest * bounding box that encloses the Shape, only that the Shape lies entirely * within the indicated Rectangle. The returned Rectangle might also fail * to completely enclose the Shape if the Shape overflows the limited range * of the integer data type.
The getBounds2D method generally returns a * tighter bounding box due to its greater flexibility in representation. * @return an integer Rectangle that completely encloses the Shape. */ public Rectangle getBounds() { return transformedShape.getBounds(); } /** * Returns a high precision and more accurate bounding box of the Shape * than the getBounds method. Note that there is no guarantee that the * returned Rectangle2D is the smallest bounding box that encloses the * Shape, only that the Shape lies entirely within the indicated * Rectangle2D. The bounding box returned by this method is usually tighter * than that returned by the getBounds method and never fails due to * overflow problems since the return value can be an instance of the * Rectangle2D that uses double precision values to store the dimensions. * @return an instance of Rectangle2D that is a high-precision bounding * box of the Shape. */ public Rectangle2D getBounds2D() { return transformedShape.getBounds2D(); } /** * Tests if the specified coordinates are inside the boundary of the Shape. */ public boolean contains(double x, double y) { return transformedShape.contains(x,y); } /** * Tests if a specified Point2D is inside the boundary of the Shape. */ public boolean contains(Point2D p) { return transformedShape.contains(p); } /** * Tests if the interior of the Shape intersects the interior of a specified * rectangular area. The rectangular area is considered to intersect the * Shape if any point is contained in both the interior of the Shape and the * specified rectangular area. * * This method might conservatively return true when: * * there is a high probability that the rectangular area and the * Shape intersect, but * * the calculations to accurately determine this intersection are * prohibitively expensive. * * This means that this method might return true even though the rectangular * area does not intersect the Shape. The Area class can be used to perform * more accurate computations of geometric intersection for any Shape object * if a more precise answer is required. */ public boolean contains(Rectangle2D r) { return transformedShape.contains(r); } public boolean contains(double x,double y, double w, double h) { return transformedShape.contains(x,y,w,h); } public boolean intersects(double x, double y, double w, double h) { return transformedShape.intersects(x, y, w, h); } public boolean intersects(Rectangle2D r) { return transformedShape.intersects(r); } public PathIterator getPathIterator(AffineTransform at) { return transformedShape.getPathIterator(at); } public PathIterator getPathIterator(AffineTransform at, double flatness) { return transformedShape.getPathIterator(at, flatness); } public void transform (AffineTransform at) { transformedShape = at.createTransformedShape(shape); } public void undoTransform () { transformedShape = shape; } public void setColor (Color newColor) { this.color = newColor; } public void setStroke (BasicStroke newStroke) { this.stroke = newStroke;} public void setFillShape(boolean fill) { fillShape = fill; } public void setShape (Shape newShape) { this.shape = newShape; this.transformedShape = newShape; } /** * Paints the icon. * The top-left corner of the icon is drawn at * the point (<code>x</code>, <code>y</code>) * in the coordinate space of the graphics context <code>g</code>. * If this icon has no image observer, * this method uses the <code>c</code> component * as the observer. 
* * @param c the component to be used as the observer * if this icon has no image observer * @param g the graphics context * @param x the X coordinate of the icon's top-left corner * @param y the Y coordinate of the icon's top-left corner */ public synchronized void paintIcon(Component c, Graphics g, int x, int y) { g.translate(x, y); drawImage (g); g.translate (-x, -y); } /** * Gets the width of the icon. * * @return the width in pixels of this icon */ public int getIconWidth() { return transformedShape.getBounds().width; } /** * Gets the height of the icon. * * @return the height in pixels of this icon */ public int getIconHeight() { return transformedShape.getBounds().height; } }
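/*
 * A usage sketch (not part of the original sources) for VectorImage: build a
 * simple triangle image, scale it with an AffineTransform, and install it as
 * a Swing Icon. Only standard AWT/Swing types are used besides VectorImage
 * itself.
 */
class VectorImageUsageSketch {
    public static void main(String[] args) {
        java.awt.Polygon triangle = new java.awt.Polygon(
                new int[] {0, 16, 8}, new int[] {16, 16, 0}, 3);
        VectorImage image = new VectorImage(
                triangle, java.awt.Color.BLUE, new java.awt.BasicStroke(2f));
        image.setFillShape(true);

        // Draw the same vector image at double size without rebuilding it.
        image.transform(java.awt.geom.AffineTransform.getScaleInstance(2, 2));
        System.out.println("icon size: "
                + image.getIconWidth() + "x" + image.getIconHeight());

        // Because VectorImage implements Icon, it can be handed to Swing directly.
        javax.swing.JLabel label = new javax.swing.JLabel(image);
        System.out.println("label uses icon: " + (label.getIcon() == image)); // true
        image.undoTransform(); // restore the original, untransformed shape
    }
}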
package com.google.ratel.deps.jackson.core.io; import java.io.*; import java.nio.ByteBuffer; import com.google.ratel.deps.jackson.core.SerializableString; /** * String token that can lazily serialize String contained and then reuse that * serialization later on. This is similar to JDBC prepared statements, for example, * in that instances should only be created when they are used more than once; * prime candidates are various serializers. *<p> * Class is final for performance reasons and since this is not designed to * be extensible or customizable (customizations would occur in calling code) */ public class SerializedString implements SerializableString, java.io.Serializable { protected final String _value; /* 13-Dec-2010, tatu: Whether use volatile or not is actually an important * decision for multi-core use cases. Cost of volatility can be non-trivial * for heavy use cases, and serialized-string instances are accessed often. * Given that all code paths with common Jackson usage patterns go through * a few memory barriers (mostly with cache/reuse pool access) it seems safe * enough to omit volatiles here, given how simple lazy initialization is. * This can be compared to how {@link String#intern} works; lazily and * without synchronization or use of volatile keyword. */ protected /*volatile*/ byte[] _quotedUTF8Ref; protected /*volatile*/ byte[] _unquotedUTF8Ref; protected /*volatile*/ char[] _quotedChars; public SerializedString(String v) { if (v == null) { throw new IllegalStateException("Null String illegal for SerializedString"); } _value = v; } /* /********************************************************** /* Serializable overrides /********************************************************** */ /** * Ugly hack, to work through the requirement that _value is indeed final, * and that JDK serialization won't call ctor(s). * * @since 2.1 */ protected transient String _jdkSerializeValue; private void readObject(ObjectInputStream in) throws IOException { _jdkSerializeValue = in.readUTF(); } private void writeObject(ObjectOutputStream out) throws IOException { out.writeUTF(_value); } protected Object readResolve() { return new SerializedString(_jdkSerializeValue); } /* /********************************************************** /* API /********************************************************** */ @Override public final String getValue() { return _value; } /** * Returns length of the String as characters */ @Override public final int charLength() { return _value.length(); } @Override public final char[] asQuotedChars() { char[] result = _quotedChars; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsString(_value); _quotedChars = result; } return result; } /** * Accessor for accessing value as is (without JSON quoting), * encoded using UTF-8 encoding. */ @Override public final byte[] asUnquotedUTF8() { byte[] result = _unquotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().encodeAsUTF8(_value); _unquotedUTF8Ref = result; } return result; } /** * Accessor for accessing value that has been quoted using JSON * quoting rules, and encoded using UTF-8 encoding.
*/ @Override public final byte[] asQuotedUTF8() { byte[] result = _quotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsUTF8(_value); _quotedUTF8Ref = result; } return result; } /* /********************************************************** /* Additional 2.0 methods for appending/writing contents /********************************************************** */ @Override public int appendQuotedUTF8(byte[] buffer, int offset) { byte[] result = _quotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsUTF8(_value); _quotedUTF8Ref = result; } final int length = result.length; if ((offset + length) > buffer.length) { return -1; } System.arraycopy(result, 0, buffer, offset, length); return length; } @Override public int appendQuoted(char[] buffer, int offset) { char[] result = _quotedChars; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsString(_value); _quotedChars = result; } final int length = result.length; if ((offset + length) > buffer.length) { return -1; } System.arraycopy(result, 0, buffer, offset, length); return length; } @Override public int appendUnquotedUTF8(byte[] buffer, int offset) { byte[] result = _unquotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().encodeAsUTF8(_value); _unquotedUTF8Ref = result; } final int length = result.length; if ((offset + length) > buffer.length) { return -1; } System.arraycopy(result, 0, buffer, offset, length); return length; } @Override public int appendUnquoted(char[] buffer, int offset) { String str = _value; final int length = str.length(); if ((offset + length) > buffer.length) { return -1; } str.getChars(0, length, buffer, offset); return length; } @Override public int writeQuotedUTF8(OutputStream out) throws IOException { byte[] result = _quotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsUTF8(_value); _quotedUTF8Ref = result; } final int length = result.length; out.write(result, 0, length); return length; } @Override public int writeUnquotedUTF8(OutputStream out) throws IOException { byte[] result = _unquotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().encodeAsUTF8(_value); _unquotedUTF8Ref = result; } final int length = result.length; out.write(result, 0, length); return length; } @Override public int putQuotedUTF8(ByteBuffer buffer) { byte[] result = _quotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().quoteAsUTF8(_value); _quotedUTF8Ref = result; } final int length = result.length; if (length > buffer.remaining()) { return -1; } buffer.put(result, 0, length); return length; } @Override public int putUnquotedUTF8(ByteBuffer buffer) { byte[] result = _unquotedUTF8Ref; if (result == null) { result = JsonStringEncoder.getInstance().encodeAsUTF8(_value); _unquotedUTF8Ref = result; } final int length = result.length; if (length > buffer.remaining()) { return -1; } buffer.put(result, 0, length); return length; } /* /********************************************************** /* Standard method overrides /********************************************************** */ @Override public final String toString() { return _value; } @Override public final int hashCode() { return _value.hashCode(); } @Override public final boolean equals(Object o) { if (o == this) return true; if (o == null || o.getClass() != getClass()) return false; SerializedString other = (SerializedString) o; return _value.equals(other._value); } }
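/*
 * A short sketch of how SerializedString's lazy caching pays off: the first
 * call to each accessor computes and stores the encoded form, and later
 * calls return the same cached array. The class name used here is the
 * repackaged one from the file above.
 */
class SerializedStringUsageSketch {
    public static void main(String[] args) {
        com.google.ratel.deps.jackson.core.io.SerializedString name =
                new com.google.ratel.deps.jackson.core.io.SerializedString("a \"quoted\" key");

        byte[] first = name.asQuotedUTF8();  // computed and cached on first use
        byte[] second = name.asQuotedUTF8(); // served from the cached reference
        System.out.println(first == second); // true: same array instance

        // Quoted vs. unquoted forms differ: quoting escapes the inner quotes.
        System.out.println(name.asQuotedUTF8().length
                + " vs " + name.asUnquotedUTF8().length);
    }
}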
package com.gdn.venice.client.app.logistic.view; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; import com.gdn.venice.client.app.DataConstantNameTokens; import com.gdn.venice.client.app.DataNameTokens; import com.gdn.venice.client.app.logistic.presenter.ActivityReportReconciliationPresenter; import com.gdn.venice.client.app.logistic.view.handlers.ActivityReportReconciliationUiHandlers; import com.gdn.venice.client.app.logistic.widgets.ActivityReportReconciliation; import com.gdn.venice.client.app.logistic.widgets.ActivityReportReconciliationProblem; import com.gdn.venice.client.app.logistic.widgets.MerchantPickupDetailWindow; import com.gdn.venice.client.presenter.MainPagePresenter; import com.gdn.venice.client.util.PrintUtility; import com.gdn.venice.client.util.Util; import com.gdn.venice.client.widgets.RafViewLayout; import com.google.gwt.core.client.GWT; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import com.gwtplatform.mvp.client.ViewWithUiHandlers; import com.smartgwt.client.data.DSCallback; import com.smartgwt.client.data.DSRequest; import com.smartgwt.client.data.DSResponse; import com.smartgwt.client.data.DataSource; import com.smartgwt.client.data.Record; import com.smartgwt.client.rpc.RPCCallback; import com.smartgwt.client.rpc.RPCManager; import com.smartgwt.client.rpc.RPCRequest; import com.smartgwt.client.rpc.RPCResponse; import com.smartgwt.client.types.Alignment; import com.smartgwt.client.types.DateDisplayFormat; import com.smartgwt.client.types.Encoding; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.IButton; import com.smartgwt.client.widgets.Label; import com.smartgwt.client.widgets.Window; import com.smartgwt.client.widgets.events.ClickEvent; import com.smartgwt.client.widgets.events.ClickHandler; import com.smartgwt.client.widgets.events.CloseClickHandler; import com.smartgwt.client.widgets.events.CloseClientEvent; import com.smartgwt.client.widgets.form.DynamicForm; import com.smartgwt.client.widgets.form.fields.DateTimeItem; import com.smartgwt.client.widgets.form.fields.SelectItem; import com.smartgwt.client.widgets.form.fields.UploadItem; import com.smartgwt.client.widgets.grid.CellFormatter; import com.smartgwt.client.widgets.grid.GroupNode; import com.smartgwt.client.widgets.grid.GroupTitleRenderer; import com.smartgwt.client.widgets.grid.ListGrid; import com.smartgwt.client.widgets.grid.ListGridField; import com.smartgwt.client.widgets.grid.ListGridRecord; import com.smartgwt.client.widgets.grid.events.CellClickEvent; import com.smartgwt.client.widgets.grid.events.CellClickHandler; import com.smartgwt.client.widgets.grid.events.FilterEditorSubmitEvent; import com.smartgwt.client.widgets.grid.events.FilterEditorSubmitHandler; import com.smartgwt.client.widgets.grid.events.SelectionChangedHandler; import com.smartgwt.client.widgets.grid.events.SelectionEvent; import com.smartgwt.client.widgets.layout.HLayout; import com.smartgwt.client.widgets.layout.VLayout; import com.smartgwt.client.widgets.toolbar.ToolStrip; import com.smartgwt.client.widgets.toolbar.ToolStripButton; /** * View for Activity Report Reconciliation * * @author Henry Chandra */ public class ActivityReportReconciliationView extends ViewWithUiHandlers<ActivityReportReconciliationUiHandlers> implements ActivityReportReconciliationPresenter.MyView { RafViewLayout activityReportReconciliationLayout; VLayout uploadLogVLayout; ListGrid uploadLogListGrid; ListGrid activityReportReconciliationListGrid; 
ToolStripButton submitForApprovalButton; ToolStripButton printButton; ToolStripButton exportButton; private ToolStripButton firstButton; private ToolStripButton nextButton; private ToolStripButton previousButton; private ToolStripButton lastButton; private final Label pageNumber; boolean bDisableSubmitForApprovalButton; Window uploadWindow; Window exportWindow; DynamicForm exportForm = new DynamicForm(); @Inject public ActivityReportReconciliationView() { uploadLogVLayout = new VLayout(); uploadLogVLayout.setShowResizeBar(true); uploadLogVLayout.setHeight(150); activityReportReconciliationLayout = new RafViewLayout(); ToolStrip activityReportReconciliationToolStrip = new ToolStrip(); activityReportReconciliationToolStrip.setWidth100(); ToolStripButton uploadReport = new ToolStripButton(); uploadReport.setIcon("[SKIN]/icons/up.png"); uploadReport.setTooltip("Upload New Report"); uploadReport.setTitle("Upload"); uploadReport.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { buildUploadWindow().show(); } }); submitForApprovalButton = new ToolStripButton(); submitForApprovalButton.setIcon("[SKIN]/icons/process.png"); submitForApprovalButton.setTooltip("Submit for Approval"); submitForApprovalButton.setTitle("Submit"); printButton = new ToolStripButton(); printButton.setIcon("[SKIN]/icons/printer.png"); printButton.setTooltip("Print Activity Reconciliation Details"); printButton.setTitle("Print"); exportButton = new ToolStripButton(); exportButton.setIcon("[SKIN]/icons/notes_accept.png"); exportButton.setTooltip("Export Activity Reconciliation Details"); exportButton.setTitle("Export"); firstButton = new ToolStripButton("1"); firstButton.setTooltip("Go to first page"); firstButton.setDisabled(true); previousButton = new ToolStripButton("Prev"); previousButton.setTooltip("Go to previous page"); previousButton.setDisabled(true); nextButton = new ToolStripButton("Next"); nextButton.setTooltip("Go to next page"); lastButton = new ToolStripButton(); lastButton.setTooltip("Go to last page"); pageNumber = new Label(" 1 "); pageNumber.setTooltip("Current page"); pageNumber.setWidth(2); pageNumber.setAutoWidth(); pageNumber.setVisible(false); submitForApprovalButton.setDisabled(true); activityReportReconciliationToolStrip.addButton(uploadReport); activityReportReconciliationToolStrip.addSeparator(); activityReportReconciliationToolStrip.addButton(submitForApprovalButton); activityReportReconciliationToolStrip.addSeparator(); activityReportReconciliationToolStrip.addButton(printButton); activityReportReconciliationToolStrip.addSeparator(); activityReportReconciliationToolStrip.addButton(exportButton); // activityReportReconciliationToolStrip.addButton(previousButton); // activityReportReconciliationToolStrip.addButton(firstButton); // activityReportReconciliationToolStrip.addMember(pageNumber); // activityReportReconciliationToolStrip.addButton(lastButton); // activityReportReconciliationToolStrip.addButton(nextButton); activityReportReconciliationListGrid = new ActivityReportReconciliation() { @Override protected Canvas getExpansionComponent(ListGridRecord record) { String airwayBillId = record.getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID); String airwayBillApprovalStatus = record.getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC); DataSource reconciliationProblemDataSource = getUiHandlers().onExpandAirwayBillRow(airwayBillId); return new
ActivityReportReconciliationProblem(airwayBillId, reconciliationProblemDataSource, airwayBillApprovalStatus); } @Override protected String getCellCSSText(ListGridRecord record, int rowNum, int colNum) { //Only color the status and approval status if (getFieldName(colNum).equals(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS) || getFieldName(colNum).equals(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSID)) { String resultStatus = record.getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS); String approvalStatus = record.getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC); if (resultStatus == null || resultStatus.isEmpty()) { return super.getCellCSSText(record, rowNum, colNum); } //if OK, color it light green if (resultStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_OK.toUpperCase())) { return "background-color:#00FF00;"; } else if (resultStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_PROBLEMEXISTS.toUpperCase())) { //if problem exists... if (approvalStatus!=null && approvalStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS_APPROVALSTATUSDESC_APPROVED.toUpperCase())) { //...and approved, color it dark green return "color:#FFFFFF;background-color:#0e7365;"; } else { //...and not approved, color it yellow return "background-color:#ece355;"; } } else if (resultStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_NODATAFROMMTA.toUpperCase()) || resultStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_INVALIDGDNREF.toUpperCase())) { //if no data from MTA and invalid GDN Ref if (approvalStatus!=null && approvalStatus.toUpperCase().contains(DataConstantNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS_APPROVALSTATUSDESC_APPROVED.toUpperCase())) { //...and approved, color it dark green return "color:#FFFFFF;background-color:#0e7365;"; } else { //...and not approved, color it red return "color:#FFFFFF;background-color:#FF0000;"; } } } return super.getCellCSSText(record, rowNum, colNum); } }; activityReportReconciliationLayout.setMembers(uploadLogVLayout, activityReportReconciliationToolStrip); buildUploadLogListGrid(); bindCustomUiHandlers(); } private Window buildUploadWindow() { uploadWindow = new Window(); uploadWindow.setWidth(360); uploadWindow.setHeight(120); uploadWindow.setTitle("Upload Activity Report"); uploadWindow.setShowMinimizeButton(false); uploadWindow.setIsModal(true); uploadWindow.setShowModalMask(true); uploadWindow.centerInPage(); uploadWindow.addCloseClickHandler(new CloseClickHandler() { public void onCloseClick(CloseClientEvent event) { uploadWindow.destroy(); } }); VLayout uploadLayout = new VLayout(); uploadLayout.setHeight100(); uploadLayout.setWidth100(); final DynamicForm uploadForm = new DynamicForm(); uploadForm.setPadding(5); uploadForm.setEncoding(Encoding.MULTIPART); uploadForm.setTarget("upload_frame"); final SelectItem providerSelectItem = new SelectItem("Provider"); providerSelectItem.setTitle("Provider"); providerSelectItem.setValueMap(DataConstantNameTokens.LOGISTICPROVIDER_JNE, DataConstantNameTokens.LOGISTICPROVIDER_NCS, DataConstantNameTokens.LOGISTICPROVIDER_RPX, DataConstantNameTokens.LOGISTICPROVIDER_MSG); UploadItem reportFileItem = new UploadItem(); reportFileItem.setTitle("Activity Report"); uploadForm.setItems(providerSelectItem, reportFileItem); HLayout uploadCancelButtons = new HLayout(5); IButton buttonUpload = new 
IButton("Upload"); IButton buttonCancel = new IButton("Cancel"); buttonUpload.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { String host = GWT.getHostPageBaseURL(); /* * Change host to use Geronimo servlet URL in development */ if(host.contains(":8889")){ host = "http://localhost:8090/"; }else{ host = host.substring(0, host.lastIndexOf("/", host.length()-2)+1); } if (providerSelectItem.getValueAsString().equals(DataConstantNameTokens.LOGISTICPROVIDER_RPX)) { uploadForm.setAction(host + "Venice/ActivityReportImportRPXServlet?username=" + MainPagePresenter.signedInUser); } else if (providerSelectItem.getValueAsString().equals(DataConstantNameTokens.LOGISTICPROVIDER_NCS)) { uploadForm.setAction(host + "Venice/ActivityReportImportNCSServlet?username=" + MainPagePresenter.signedInUser); } else if (providerSelectItem.getValueAsString().equals(DataConstantNameTokens.LOGISTICPROVIDER_JNE)) { uploadForm.setAction(host + "Venice/ActivityReportImportJNEServlet?username=" + MainPagePresenter.signedInUser); } else if (providerSelectItem.getValueAsString().equals(DataConstantNameTokens.LOGISTICPROVIDER_MSG)) { uploadForm.setAction(host + "Venice/ActivityReportImportMSGServlet?username=" + MainPagePresenter.signedInUser); } uploadForm.submitForm(); uploadWindow.destroy(); // activityReportReconciliationListGrid.fetchData(new Criteria(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDER_WCSORDERID,"382532")); } }); buttonCancel.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { uploadWindow.destroy(); } }); uploadCancelButtons.setAlign(Alignment.CENTER); uploadCancelButtons.setMembers(buttonUpload, buttonCancel); uploadLayout.setMembers(uploadForm, uploadCancelButtons); uploadWindow.addItem(uploadLayout); return uploadWindow; } private Window buildExportReportFileWindow() { exportWindow = new Window(); exportWindow.setWidth(360); exportWindow.setHeight(170); exportWindow.setTitle("Export Activity Reconciliation"); exportWindow.setShowMinimizeButton(false); exportWindow.setIsModal(true); exportWindow.setShowModalMask(true); exportWindow.centerInPage(); exportWindow.addCloseClickHandler(new CloseClickHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.events.CloseClickHandler#onCloseClick(com.smartgwt.client.widgets.events.CloseClientEvent) */ public void onCloseClick(CloseClientEvent event) { exportWindow.destroy(); } }); VLayout exportLayout = new VLayout(); exportLayout.setHeight100(); exportLayout.setWidth100(); exportForm.setPadding(5); final SelectItem logisticProviderItem = new SelectItem(DataNameTokens.LOGLOGISTICSPROVIDER_LOGISTICSPROVIDERID); logisticProviderItem.setTitle("Logistic Provider"); logisticProviderItem.setWidth("120"); final SelectItem approvalStatusItem = new SelectItem(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSID); approvalStatusItem.setTitle("Approval Status"); approvalStatusItem.setWidth("120"); //Request approval combo RPCRequest requestApproval=new RPCRequest(); requestApproval = new RPCRequest(); requestApproval.setActionURL(GWT.getHostPageBaseURL() + "ActivityReportReconciliationPresenterServlet?method=fetchApprovalStatusComboBoxData&type=RPC"); requestApproval.setHttpMethod("POST"); requestApproval.setUseSimpleHttp(true); requestApproval.setShowPrompt(false); RPCManager.sendRequest(requestApproval, new RPCCallback () { public void execute(RPCResponse response, Object rawData, RPCRequest request) { String rpcResponseApproval = rawData.toString(); String xmlDataApproval = 
rpcResponseApproval; final LinkedHashMap<String, String> approvalMap = Util.formComboBoxMap(Util.formHashMapfromXML(xmlDataApproval)); approvalMap.put("all", "All"); //Request logistic combo RPCRequest requestLogistic = new RPCRequest(); requestLogistic.setActionURL(GWT.getHostPageBaseURL() + "ActivityReportReconciliationPresenterServlet?method=fetchLogisticProviderComboBoxData&type=RPC"); requestLogistic.setHttpMethod("POST"); requestLogistic.setUseSimpleHttp(true); requestLogistic.setShowPrompt(false); RPCManager.sendRequest(requestLogistic, new RPCCallback () { public void execute(RPCResponse response, Object rawData, RPCRequest request) { String rpcResponseLogistic = rawData.toString(); String xmlDataLogistic = rpcResponseLogistic; final LinkedHashMap<String, String> logisticMap = Util.formComboBoxMap(Util.formHashMapfromXML(xmlDataLogistic)); logisticMap.put("all", "All"); approvalStatusItem.setValueMap(approvalMap); logisticProviderItem.setValueMap(logisticMap); } }); } }); final SelectItem reconStatusItem = new SelectItem(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS); reconStatusItem.setTitle("Recon Status"); LinkedHashMap<String, String> reconMap = new LinkedHashMap<String, String>(); reconMap.put("OK", "OK"); reconMap.put("Problem Exists", "Problem Exists"); reconMap.put("No Data from MTA", "No Data from MTA"); reconMap.put("None", "None"); reconMap.put("all", "All"); reconStatusItem.setWidth("120"); reconStatusItem.setValueMap(reconMap); final DateTimeItem exportDateItem = new DateTimeItem(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLTIMESTAMP); exportDateItem.setTitle("Order Date From"); exportDateItem.setDateFormatter(DateDisplayFormat.TOEUROPEANSHORTDATETIME); exportDateItem.setWidth("140"); exportForm.setFields( logisticProviderItem, approvalStatusItem, reconStatusItem, exportDateItem ); HLayout exportButtons = new HLayout(5); IButton buttonExport = new IButton("Export"); IButton buttonCancel = new IButton("Cancel"); buttonExport.addClickHandler(new ClickHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.events.ClickHandler#onClick(com.smartgwt.client.widgets.events.ClickEvent) */ @Override public void onClick(ClickEvent event) { //If in debug mode then change the host URL to the servlet in the server side String host = GWT.getHostPageBaseURL(); if(host.contains("8889")){ host = "http://localhost:8090/"; } /* * Somehow when the app is deployed in Geronimo the getHostPageBaseURL call * adds the context root of "Venice/" as it is the web application. * This does not happen in development mode as it is running in the root * of the Jetty servlet container. * * Consequently the context root needs to be removed because the servlet * being called has its own context root in a different web application. 
*/ if(host.contains("Venice/")){ host = host.substring(0, host.indexOf("Venice/")); } exportForm.setAction(host + "Venice/ActivityReportExportServlet?logistic="+logisticProviderItem.getValueAsString()+"&approval="+approvalStatusItem.getValueAsString()+"&recon="+reconStatusItem.getValueAsString()+"&date="+exportDateItem.getValue().toString()); exportForm.submitForm(); exportWindow.destroy(); } }); buttonCancel.addClickHandler(new ClickHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.events.ClickHandler#onClick(com.smartgwt.client.widgets.events.ClickEvent) */ @Override public void onClick(ClickEvent event) { exportWindow.destroy(); } }); exportButtons.setAlign(Alignment.CENTER); exportButtons.setMembers(buttonExport, buttonCancel); exportLayout.setMembers(exportForm, exportButtons); exportWindow.addItem(exportLayout); return exportWindow; } @Override public Widget asWidget() { return activityReportReconciliationLayout; } private ListGrid buildUploadLogListGrid() { uploadLogListGrid = new ListGrid(); uploadLogListGrid.setWidth100(); uploadLogListGrid.setHeight100(); uploadLogListGrid.setShowAllRecords(true); uploadLogListGrid.setSortField(0); uploadLogListGrid.setCanResizeFields(true); uploadLogListGrid.setShowRowNumbers(true); uploadLogListGrid.setShowFilterEditor(true); // uploadLogListGrid.setAutoFetchData(true); return uploadLogListGrid; } protected void bindCustomUiHandlers() { printButton.addClickHandler(new ClickHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.events.ClickHandler#onClick(com.smartgwt.client.widgets.events.ClickEvent) */ @Override public void onClick(ClickEvent event) { PrintUtility.printComponent(activityReportReconciliationListGrid); } }); submitForApprovalButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { ListGridRecord[] selectedRecords = activityReportReconciliationListGrid.getSelection(); ArrayList<String> airwayBillIds = new ArrayList<String>(); for (int i=0;i<selectedRecords.length;i++) { airwayBillIds.add(selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID)); } getUiHandlers().onSubmitForApproval(airwayBillIds); } }); activityReportReconciliationListGrid.addSelectionChangedHandler(new SelectionChangedHandler() { @Override public void onSelectionChanged(SelectionEvent event) { ListGridRecord[] selectedRecords = activityReportReconciliationListGrid.getSelection(); if (selectedRecords.length==0) { //If no records selected, disable Submit For Approval Button submitForApprovalButton.setDisabled(true); } else { for (int i=0;i<selectedRecords.length;i++) { //If it has been submitted or "No Data from MTA", disable Submit For Approval Button //also disable Submit For Approval Button if it has been approved bDisableSubmitForApprovalButton = selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC).equals(DataConstantNameTokens.LOGAPPROVALSTATUS_APPROVALSTATUSDESC_SUBMITTED) || selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS).equals(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_NODATAFROMMTA)|| selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC).equals(DataConstantNameTokens.LOGAPPROVALSTATUS_APPROVALSTATUSDESC_APPROVED); submitForApprovalButton.setDisabled(bDisableSubmitForApprovalButton); if (bDisableSubmitForApprovalButton) { break; } else { //If it hasn't been submitted, //If the status is "Problem Exists", get the 
Problem's DataSource and fetch the data String resultStatus = selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS); if (resultStatus!=null && resultStatus.equals(DataConstantNameTokens.LOGAIRWAYBILL_RESULTSTATUS_PROBLEMEXISTS)) { String airwayBillId = selectedRecords[i].getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID); DataSource reconciliationProblemDataSource = getUiHandlers().onExpandAirwayBillRow(airwayBillId); reconciliationProblemDataSource.fetchData(null, new DSCallback() { @Override public void execute(DSResponse response, Object rawData, DSRequest request) { if (bDisableSubmitForApprovalButton) { //if already disabled for one record, no need to check the rest return; } Record[] records = response.getData(); for (int j=0;j<records.length;j++) { //If there are problems with no "Action Applied" (null or empty), disable Submit For Approval Button bDisableSubmitForApprovalButton = records[j].getAttributeAsString(DataNameTokens.LOGACTIVITYRECONRECORD_LOGACTIONAPPLIED_ACTIONAPPLIEDID) == null || records[j].getAttributeAsString(DataNameTokens.LOGACTIVITYRECONRECORD_LOGACTIONAPPLIED_ACTIONAPPLIEDID).isEmpty(); submitForApprovalButton.setDisabled(bDisableSubmitForApprovalButton); if (bDisableSubmitForApprovalButton) { break; } } } }); } if (bDisableSubmitForApprovalButton) { break; } } } } } }); activityReportReconciliationListGrid.addCellClickHandler(new CellClickHandler() { @Override public void onCellClick(CellClickEvent event) { if (activityReportReconciliationListGrid.getField(event.getColNum()).getName().equals(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENMERCHANTPRODUCT_VENMERCHANT_VENPARTY_FULLORLEGALNAME)) { String airwayBillId = event.getRecord().getAttributeAsString(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID); Window merchantPickupDetailWindow = new MerchantPickupDetailWindow(getUiHandlers().onShowMerchantPickUpDetail(airwayBillId)); merchantPickupDetailWindow.show(); } } }); exportButton.addClickHandler(new ClickHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.events.ClickHandler#onClick(com.smartgwt.client.widgets.events.ClickEvent) */ @Override public void onClick(ClickEvent event) { buildExportReportFileWindow().show(); } }); firstButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { goToPage(1); } }); nextButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { goToPage(Integer.parseInt(pageNumber.getContents().trim()) + 1); } }); lastButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { goToPage(Integer.parseInt(lastButton.getTitle().trim())); } }); previousButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { goToPage(Integer.parseInt(pageNumber.getContents().trim()) - 1); } }); uploadLogListGrid.addFilterEditorSubmitHandler(new FilterEditorSubmitHandler() { /* (non-Javadoc) * @see com.smartgwt.client.widgets.grid.events.FilterEditorSubmitHandler#onFilterEditorSubmit(com.smartgwt.client.widgets.grid.events.FilterEditorSubmitEvent) */ @Override public void onFilterEditorSubmit(FilterEditorSubmitEvent event) { refreshUploadLogListGridData(); } }); } @Override public void loadUploadLogData(DataSource dataSource) { uploadLogListGrid.setDataSource(dataSource);
uploadLogListGrid.setFields(Util.getListGridFieldsFromDataSource(dataSource)); uploadLogListGrid.setAutoFetchData(false); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FILEUPLOADLOGID).setWidth(75); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FILEUPLOADNAME).setHidden(true); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FILEUPLOADNAMEANDLOC).setWidth(120); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_ACTUALFILEUPLOADNAME).setWidth(200); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FILEUPLOADFORMAT).setWidth(80); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FAILEDFILEUPLOADNAMEANDLOC).setWidth(200); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_UPLOADSTATUS).setWidth(100); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_TIMESTAMP).setWidth(100); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_USERNAME).setWidth(100); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FILEUPLOADNAMEANDLOC).setCellFormatter(new CellFormatter() { @Override public String format(Object value, ListGridRecord record, int rowNum, int colNum) { String cellFormat = (String) value; cellFormat = cellFormat.substring(cellFormat.lastIndexOf("/")+1, cellFormat.length()); return "<a href='" + GWT.getHostPageBaseURL() + MainPagePresenter.fileDownloadPresenterServlet + "?filename=" + value + "' target='_blank'>" + cellFormat + "</a>"; } }); uploadLogListGrid.getField(DataNameTokens.LOGFILEUPLOADLOG_FAILEDFILEUPLOADNAMEANDLOC).setCellFormatter(new CellFormatter() { @Override public String format(Object value, ListGridRecord record, int rowNum, int colNum) { String cellFormat = (String) value; cellFormat = cellFormat.substring(cellFormat.lastIndexOf("/")+1, cellFormat.length()); return "<a href='" + GWT.getHostPageBaseURL() + MainPagePresenter.fileDownloadPresenterServlet + "?filename=" + value + "' target='_blank'>" + cellFormat + "</a>"; } }); uploadLogVLayout.addMember(uploadLogListGrid); } @Override public void refreshUploadLogListGridData() { DSCallback callBack = new DSCallback() { /* (non-Javadoc) * @see com.smartgwt.client.data.DSCallback#execute(com.smartgwt.client.data.DSResponse, java.lang.Object, com.smartgwt.client.data.DSRequest) */ @Override public void execute(DSResponse response, Object rawData, DSRequest request) { uploadLogListGrid.setData(response.getData()); } }; uploadLogListGrid.getDataSource().fetchData(uploadLogListGrid.getFilterEditorCriteria(), callBack); } @Override public void loadAirwayBillData(DataSource dataSource, Map<String,String> approval, Map<String,String> status) { dataSource.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDERSTATUS_ORDERSTATUSID).setValueMap(status); dataSource.getField(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSID).setValueMap(approval); LinkedHashMap<String, String> reconStatus= new LinkedHashMap<String, String>(); reconStatus.put("OK", "OK"); reconStatus.put("Problem Exists", "Problem Exists"); reconStatus.put("No Data from MTA", "No Data from MTA"); dataSource.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS).setValueMap(reconStatus); activityReportReconciliationListGrid.setDataSource(dataSource); activityReportReconciliationListGrid.setFields(Util.getListGridFieldsFromDataSource(dataSource)); activityReportReconciliationListGrid.setAutoFetchData(false); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID).setWidth(75); 
activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLID).setHidden(true); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDER_WCSORDERID).setWidth(75); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_WCSORDERITEMID).setWidth(75); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_GDNREFERENCE).setWidth(120); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYRESULTSTATUS).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLTIMESTAMP).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDERSTATUS_ORDERSTATUSID).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDERSTATUS_ORDERSTATUSCODE).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDERSTATUS_ORDERSTATUSCODE).setHidden(true); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENMERCHANTPRODUCT_VENMERCHANT_VENPARTY_FULLORLEGALNAME).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLPICKUPDATETIME).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_AIRWAYBILLNUMBER).setWidth(120); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSID).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_LOGAPPROVALSTATUS2_APPROVALSTATUSDESC).setHidden(true); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYAPPROVEDBYUSERID).setWidth(125); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYFILENAMEANDLOC).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENORDER_ORDERDATE).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_LOGMERCHANTPICKUPINSTRUCTIONS_VENADDRESS_VENCITY_CITYNAME).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_LOGMERCHANTPICKUPINSTRUCTIONS_VENADDRESS_VENCITY_CITYNAME).setCanFilter(false); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_LOGLOGISTICSPROVIDER_LOGISTICSPROVIDERCODE).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_LOGLOGISTICSPROVIDER_LOGISTICSPROVIDERCODE).setCanFilter(false); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_DESTINATION).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ZIP).setWidth(100); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_VENORDERITEM_VENMERCHANTPRODUCT_VENMERCHANT_VENPARTY_FULLORLEGALNAME).setCellFormatter(new CellFormatter() { @Override public String format(Object value, ListGridRecord record, int rowNum, int colNum) { return "<span style='color:blue;text-decoration:underline;cursor:hand;cursor:pointer'>"+value+"</span>"; } }); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYFILENAMEANDLOC).setCellFormatter(new CellFormatter() { @Override public 
String format(Object value, ListGridRecord record, int rowNum, int colNum) { String cellFormat = (String) value; cellFormat = cellFormat.substring(cellFormat.lastIndexOf("/")+1, cellFormat.length()); return "<a href='" + GWT.getHostPageBaseURL() + MainPagePresenter.fileDownloadPresenterServlet + "?filename=" + value + "' target='_blank'>" + cellFormat + "</a>"; } }); activityReportReconciliationListGrid.getField(DataNameTokens.LOGAIRWAYBILL_ACTIVITYFILENAMEANDLOC).setGroupTitleRenderer(new GroupTitleRenderer() { @Override public String getGroupTitle(Object groupValue, GroupNode groupNode, ListGridField field, String fieldName, ListGrid grid) { String groupTitle = (String) groupValue; if (!groupTitle.startsWith("/")) { return groupTitle; } groupTitle = groupTitle.substring(groupTitle.lastIndexOf("/")+1, groupTitle.length()); return "<a href='" + GWT.getHostPageBaseURL() + MainPagePresenter.fileDownloadPresenterServlet + "?filename=" + groupValue + "' target='_blank'>" + groupTitle + "</a>"; } }); activityReportReconciliationListGrid.groupBy(DataNameTokens.LOGAIRWAYBILL_ACTIVITYFILENAMEANDLOC); activityReportReconciliationLayout.addMember(activityReportReconciliationListGrid); } @Override public void refreshAirwayBillData() { if (activityReportReconciliationListGrid instanceof ActivityReportReconciliation) { ((ActivityReportReconciliation) activityReportReconciliationListGrid).refreshAirwayBillData(); } } public void goToPage(int pageNum) { if (pageNum < 1) pageNum = 1; pageNumber.setContents(" " + pageNum + " "); updatePage(pageNum); } public void updatePage(int pageNum) { getUiHandlers().onFetchComboBoxData((pageNum - 1) * 50, 50); if(pageNum == 1 && pageNum == Integer.parseInt(lastButton.getTitle().trim())){ previousButton.setDisabled(true); firstButton.setDisabled(true); lastButton.setDisabled(true); nextButton.setDisabled(true); pageNumber.setVisible(false); } else if(pageNum == 1){ previousButton.setDisabled(true); firstButton.setDisabled(true); lastButton.setDisabled(false); nextButton.setDisabled(false); pageNumber.setVisible(false); } else{ if(pageNum == Integer.parseInt(lastButton.getTitle().trim())){ previousButton.setDisabled(false); firstButton.setDisabled(false); lastButton.setDisabled(true); nextButton.setDisabled(true); pageNumber.setVisible(false); } else{ previousButton.setDisabled(false); firstButton.setDisabled(false); lastButton.setDisabled(false); nextButton.setDisabled(false); pageNumber.setVisible(true); } } } @Override public void setLastPage(int totalRows){ int lastPage = Math.max(1, (totalRows + 49) / 50); /* ceiling division; the old totalRows/50+1 reported a spurious extra page whenever totalRows was an exact multiple of 50 */ lastButton.setTitle(" "+lastPage+" "); if(Integer.parseInt(lastButton.getTitle().trim()) == 1) { lastButton.setVisible(false); nextButton.setDisabled(true); } else { lastButton.setVisible(true); nextButton.setDisabled(false); } } }
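/*
 * A minimal, hypothetical sketch of the 50-rows-per-page arithmetic that goToPage(), updatePage()
 * and setLastPage() above rely on. The page size of 50 is taken from the
 * onFetchComboBoxData((pageNum - 1) * 50, 50) call; the class and method names below are
 * illustrative only and are not part of the view above.
 */
public class PaginationMathSketch {
    private static final int PAGE_SIZE = 50; // matches the hard-coded page size in updatePage()

    /** Number of pages needed for totalRows rows: ceiling division, never below 1. */
    public static int pageCount(int totalRows) {
        return Math.max(1, (totalRows + PAGE_SIZE - 1) / PAGE_SIZE);
    }

    /** Zero-based row offset handed to the data fetch for a 1-based page number. */
    public static int offsetFor(int pageNum) {
        return (pageNum - 1) * PAGE_SIZE;
    }

    public static void main(String[] args) {
        System.out.println(pageCount(100)); // 2: 100 rows fill exactly two pages
        System.out.println(pageCount(101)); // 3: one extra row starts a third page
        System.out.println(offsetFor(3));   // 100: page 3 starts at row 100
    }
}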
package com.groupon.lex.metrics.history.xdr.support; import com.google.common.collect.ImmutableMap; import com.groupon.lex.metrics.GroupName; import com.groupon.lex.metrics.Histogram; import com.groupon.lex.metrics.MetricName; import com.groupon.lex.metrics.MetricValue; import com.groupon.lex.metrics.SimpleGroupPath; import com.groupon.lex.metrics.Tags; import com.groupon.lex.metrics.history.TSDataVersionDispatch.Releaseable; import com.groupon.lex.metrics.history.v2.list.FileListFileSupport; import com.groupon.lex.metrics.history.v2.tables.FileTableFileSupport; import com.groupon.lex.metrics.history.xdr.Const; import com.groupon.lex.metrics.timeseries.ImmutableTimeSeriesValue; import com.groupon.lex.metrics.timeseries.SimpleTimeSeriesCollection; import com.groupon.lex.metrics.timeseries.TimeSeriesCollection; import com.groupon.lex.metrics.timeseries.TimeSeriesValue; import java.io.File; import java.io.IOException; import static java.lang.Math.sqrt; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Path; import static java.nio.file.StandardOpenOption.CREATE; import static java.nio.file.StandardOpenOption.READ; import static java.nio.file.StandardOpenOption.WRITE; import java.util.Collection; import static java.util.Collections.singletonMap; import java.util.List; import java.util.Random; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; /** * Utilities for creating and compressing TSData files. * * @author ariane */ @RequiredArgsConstructor public class FileSupport { public static interface Writer { public void create_file(Releaseable<FileChannel> fd, Collection<? extends TimeSeriesCollection> tsdata, boolean compress) throws IOException; public short getMajor(); public short getMinor(); public default boolean isEmptyAllowed() { return true; } } public static final Writer NO_WRITER = new Writer() { @Override public void create_file(Releaseable<FileChannel> fd, Collection<? extends TimeSeriesCollection> tsdata, boolean compress) throws IOException { throw new UnsupportedOperationException("Not a writer."); } @Override public short getMajor() { return Const.MAJOR; } @Override public short getMinor() { return Const.MINOR; } }; @NonNull private final Writer writer; @Getter private final boolean compressed; public final DateTime NOW = new DateTime(DateTimeZone.UTC); public short getMajor() { return writer.getMajor(); } public short getMinor() { return writer.getMinor(); } public boolean isEmptyAllowed() { return writer.isEmptyAllowed(); } /** * Create a file. */ public void create_file(Path file, Collection<? extends TimeSeriesCollection> tsdata) throws IOException { try (Releaseable<FileChannel> fd = new Releaseable<>(FileChannel.open(file, READ, WRITE, CREATE))) { writer.create_file(fd, tsdata, compressed); } catch (IOException | RuntimeException ex) { Files.delete(file); throw ex; } } /** * Generates an endless stream of TimeSeriesCollections. 
*/ public StreamedCollection<TimeSeriesCollection> create_tsdata(int width) { int metric_width = (int) sqrt(width) + 1; int group_width = (width + metric_width - 1) / metric_width; final SimpleGroupPath base_path = SimpleGroupPath.valueOf("foo", "bar"); final List<GroupName> group_names = Stream.generate(new CounterSupplier()) .limit(group_width) .map(idx -> Tags.valueOf(singletonMap("instance", MetricValue.fromIntValue(idx)))) .map(tags -> GroupName.valueOf(base_path, tags)) .collect(Collectors.toList()); final List<MetricName> metric_names = Stream.generate(new CounterSupplier()) .limit(metric_width) .map(i -> (MetricName.valueOf("x", String.valueOf(i)))) .collect(Collectors.toList()); return new StreamedCollection<>(() -> Stream.generate(new CounterSupplier())) .map((Integer i) -> { final DateTime now = NOW.plusSeconds(5 * i); final Random rnd = new Random(Integer.hashCode(i)); // Deterministic RNG. final Stream<TimeSeriesValue> tsv_stream = group_names.stream() .map(name -> { return new ImmutableTimeSeriesValue( name, metric_names.stream(), Function.identity(), (ignored) -> MetricValue.fromIntValue(rnd.nextLong())); }); return new SimpleTimeSeriesCollection(now, tsv_stream); }); } private static class CounterSupplier implements Supplier<Integer> { private int idx = 0; @Override public Integer get() { return idx++; } } public static ByteBuffer createSingleBuffer(Collection<ByteBuffer> bufs) { final int totalLength = bufs.stream() .mapToInt(ByteBuffer::limit) .sum(); final ByteBuffer out = ByteBuffer.allocateDirect(totalLength); bufs.forEach(out::put); out.flip(); return out; } /** * Generate sample data files. */ public static void main(String[] args) throws IOException { final List<TimeSeriesCollection> tsdata = Stream.<TimeSeriesCollection>builder() .add(new SimpleTimeSeriesCollection( DateTime.parse("1980-01-01T08:00:00.000Z"), Stream.of( new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "histogram"), Tags.valueOf(singletonMap("true", MetricValue.TRUE))), singletonMap( MetricName.valueOf("hist", "o", "gram"), MetricValue.fromHistValue( new Histogram( new Histogram.RangeWithCount(0, 1, 2), new Histogram.RangeWithCount(3, 4, 5))))), new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "int"), Tags.valueOf(singletonMap("false", MetricValue.FALSE))), singletonMap( MetricName.valueOf("i", "n", "t"), MetricValue.fromIntValue(42))) ))) .add(new SimpleTimeSeriesCollection( DateTime.parse("1990-01-01T09:00:00.000Z"), Stream.of( new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "histogram"), Tags.EMPTY), singletonMap( MetricName.valueOf("hist", "o", "gram"), MetricValue.fromHistValue( new Histogram( new Histogram.RangeWithCount(0, 1, 2), new Histogram.RangeWithCount(3, 4, 5))))), new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "flt"), Tags.EMPTY), singletonMap( MetricName.valueOf("f", "l", "o", "a", "t"), MetricValue.fromDblValue(Math.E))), new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "empty"), Tags.EMPTY), singletonMap( MetricName.valueOf("value"), MetricValue.EMPTY)), new ImmutableTimeSeriesValue( GroupName.valueOf( SimpleGroupPath.valueOf("test", "string"), Tags.EMPTY), ImmutableMap.<MetricName, MetricValue>builder() .put( MetricName.valueOf("value"), MetricValue.fromStrValue("a string")) .put( MetricName.valueOf("another"), MetricValue.fromStrValue("string")) .build()) ))) .build() .collect(Collectors.toList()); new FileSupport(new 
FileSupport0(), true) .create_file(new File("/tmp/tsdata_v0.tsd").toPath(), tsdata); new FileSupport(new FileSupport1(), true) .create_file(new File("/tmp/tsdata_v1.tsd").toPath(), tsdata); new FileSupport(new FileTableFileSupport(), true) .create_file(new File("/tmp/tsdata_v2_tables.tsd").toPath(), tsdata); new FileSupport(new FileListFileSupport(), true) .create_file(new File("/tmp/tsdata_v2_list.tsd").toPath(), tsdata); } }
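/*
 * A small usage sketch for FileSupport.createSingleBuffer(...) above: it concatenates several
 * buffers into one direct buffer, flipped and ready for reading. The byte values and class name
 * are arbitrary illustration choices.
 */
import java.nio.ByteBuffer;
import java.util.Arrays;

public class SingleBufferSketch {
    public static void main(String[] args) {
        ByteBuffer a = ByteBuffer.wrap(new byte[]{1, 2});
        ByteBuffer b = ByteBuffer.wrap(new byte[]{3});
        // createSingleBuffer sizes the result from the summed limits, copies each buffer in
        // order, then flips so the combined contents can be read from position 0.
        ByteBuffer merged = FileSupport.createSingleBuffer(Arrays.asList(a, b));
        byte[] out = new byte[merged.remaining()];
        merged.get(out);
        System.out.println(Arrays.toString(out)); // [1, 2, 3]
    }
}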
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.update; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class UpdateIT extends ESIntegTestCase { private static final String UPDATE_SCRIPTS = "update_scripts"; private static final String PUT_VALUES_SCRIPT = "put_values"; private static final String FIELD_INC_SCRIPT = "field_inc"; private static final String UPSERT_SCRIPT = "scripted_upsert"; private static final String EXTRACT_CTX_SCRIPT = "extract_ctx"; public static class UpdateScriptsPlugin extends MockScriptPlugin { @Override public String pluginScriptLang() { return UPDATE_SCRIPTS; } @Override protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); scripts.put(PUT_VALUES_SCRIPT, vars -> 
{ Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx"); assertNotNull(ctx); Map<String, Object> params = new HashMap<>((Map<String, Object>) vars.get("params")); Map<String, Object> newCtx = (Map<String, Object>) params.remove("_ctx"); if (newCtx != null) { assertFalse(newCtx.containsKey("_source")); ctx.putAll(newCtx); } Map<String, Object> source = (Map<String, Object>) ctx.get("_source"); params.remove("ctx"); source.putAll(params); return ctx; }); scripts.put(FIELD_INC_SCRIPT, vars -> { Map<String, Object> params = (Map<String, Object>) vars.get("params"); String fieldname = (String) vars.get("field"); Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx"); assertNotNull(ctx); Map<String, Object> source = (Map<String, Object>) ctx.get("_source"); Number currentValue = (Number) source.get(fieldname); Number inc = (Number) params.getOrDefault("inc", 1); source.put(fieldname, currentValue.longValue() + inc.longValue()); return ctx; }); scripts.put(UPSERT_SCRIPT, vars -> { Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx"); assertNotNull(ctx); Map<String, Object> source = (Map<String, Object>) ctx.get("_source"); Number payment = (Number) vars.get("payment"); Number oldBalance = (Number) source.get("balance"); int deduction = "create".equals(ctx.get("op")) ? payment.intValue() / 2 : payment.intValue(); source.put("balance", oldBalance.intValue() - deduction); return ctx; }); scripts.put(EXTRACT_CTX_SCRIPT, vars -> { Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx"); assertNotNull(ctx); Map<String, Object> source = (Map<String, Object>) ctx.get("_source"); Map<String, Object> ctxWithoutSource = new HashMap<>(ctx); ctxWithoutSource.remove("_source"); source.put("update_context", ctxWithoutSource); return ctx; }); return scripts; } } @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(UpdateScriptsPlugin.class, InternalSettingsPlugin.class); } private void createTestIndex() throws Exception { logger.info("--> creating index test"); assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, null)))); } public void testUpsert() throws Exception { createTestIndex(); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); } updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("2")); } } public void testScriptedUpsert() throws Exception { createTestIndex(); ensureGreen(); // Script logic is // 1) New accounts take balance from "balance" in upsert doc and first payment is charged at 50% // 2) Existing accounts subtract full payment from balance stored in elasticsearch int openingBalance=10; Map<String, Object> params = new HashMap<>(); params.put("payment", 2); // Pay money from what will be a new account and opening balance comes from upsert doc // provided by client UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) .execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("balance").toString(), equalTo("9")); } // Now pay money for an existing account where balance is stored in es updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) .execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("balance").toString(), equalTo("7")); } } public void testUpsertDoc() throws Exception { createTestIndex(); ensureGreen(); 
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(true) .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); } // Issue #3265 public void testNotUpsertDoc() throws Exception { createTestIndex(); ensureGreen(); assertThrows(client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(false) .setFetchSource(true) .execute(), DocumentMissingException.class); } public void testUpsertFields() throws Exception { createTestIndex(); ensureGreen(); UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); assertThat(updateResponse.getGetResult().sourceAsMap().get("extra"), nullValue()); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); assertThat(updateResponse.getGetResult().sourceAsMap().get("extra").toString(), equalTo("foo")); } public void testIndexAutoCreation() throws Exception { UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); assertThat(updateResponse.getGetResult().sourceAsMap().get("extra"), nullValue()); } public void testUpdate() throws Exception { createTestIndex(); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); DocumentMissingException ex = expectThrows(DocumentMissingException.class, () -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet()); assertEquals("[type1][1]: document missing", 
ex.getMessage()); client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(fieldIncScript).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(2L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("2")); } Map<String, Object> params = new HashMap<>(); params.put("inc", 3); params.put("field", "field"); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, params)).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("5")); } // check noop updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("_ctx", Collections.singletonMap("op", "none")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("5")); } // check delete updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("_ctx", Collections.singletonMap("op", "delete")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(4L)); assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); } // check _source parameter client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field1"))) .setFetchSource("field1", "field2") .get(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceRef(), notNullValue()); assertThat(updateResponse.getGetResult().field("field1"), nullValue()); assertThat(updateResponse.getGetResult().sourceAsMap().size(), equalTo(1)); assertThat(updateResponse.getGetResult().sourceAsMap().get("field1"), equalTo(2)); // check updates without script // add new field client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); 
client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2")); } // change existing field client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3")); assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2")); } // recursive map Map<String, Object> testMap = new HashMap<>(); Map<String, Object> testMap2 = new HashMap<>(); Map<String, Object> testMap3 = new HashMap<>(); testMap3.put("commonkey", testMap); testMap3.put("map3", 5); testMap2.put("map2", 6); testMap.put("commonkey", testMap2); testMap.put("map1", 8); client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet(); client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); Map map1 = (Map) getResponse.getSourceAsMap().get("map"); assertThat(map1.size(), equalTo(3)); assertThat(map1.containsKey("map1"), equalTo(true)); assertThat(map1.containsKey("map3"), equalTo(true)); assertThat(map1.containsKey("commonkey"), equalTo(true)); Map map2 = (Map) map1.get("commonkey"); assertThat(map2.size(), equalTo(3)); assertThat(map2.containsKey("map1"), equalTo(true)); assertThat(map2.containsKey("map2"), equalTo(true)); assertThat(map2.containsKey("commonkey"), equalTo(true)); } } public void testUpdateRequestWithBothScriptAndDoc() throws Exception { createTestIndex(); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); } catch (ActionRequestValidationException e) { assertThat(e.validationErrors().size(), equalTo(1)); assertThat(e.validationErrors().get(0), containsString("can't provide both script and doc")); assertThat(e.getMessage(), containsString("can't provide both script and doc")); } } public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception { createTestIndex(); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(fieldIncScript) .setDocAsUpsert(true) .execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); } catch (ActionRequestValidationException e) { assertThat(e.validationErrors().size(), equalTo(1)); assertThat(e.validationErrors().get(0), containsString("doc must be specified if doc_as_upsert is enabled")); assertThat(e.getMessage(), 
containsString("doc must be specified if doc_as_upsert is enabled")); } } public void testContextVariables() throws Exception { assertAcked(prepareCreate("test") .addAlias(new Alias("alias")) .addMapping("type1") ); ensureGreen(); // Index some documents client().prepareIndex() .setIndex("test") .setType("type1") .setId("id1") .setRouting("routing1") .setSource("field1", 1, "content", "foo") .execute().actionGet(); client().prepareIndex() .setIndex("test") .setType("type1") .setId("id2") .setSource("field1", 0, "content", "bar") .execute().actionGet(); // Update the first object and note context variables values UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "id1") .setRouting("routing1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); GetResponse getResponse = client().prepareGet("test", "type1", "id1").setRouting("routing1").execute().actionGet(); Map<String, Object> updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context"); assertEquals("test", updateContext.get("_index")); assertEquals("type1", updateContext.get("_type")); assertEquals("id1", updateContext.get("_id")); assertEquals(1, updateContext.get("_version")); assertEquals("routing1", updateContext.get("_routing")); // Idem with the second object updateResponse = client().prepareUpdate("test", "type1", "id2") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute().actionGet(); assertEquals(2, updateResponse.getVersion()); getResponse = client().prepareGet("test", "type1", "id2").execute().actionGet(); updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context"); assertEquals("test", updateContext.get("_index")); assertEquals("type1", updateContext.get("_type")); assertEquals("id2", updateContext.get("_id")); assertEquals(1, updateContext.get("_version")); assertNull(updateContext.get("_routing")); assertNull(updateContext.get("_ttl")); } public void testConcurrentUpdateWithRetryOnConflict() throws Exception { final boolean useBulkApi = randomBoolean(); createTestIndex(); ensureGreen(); int numberOfThreads = scaledRandomIntBetween(2,5); final CountDownLatch latch = new CountDownLatch(numberOfThreads); final CountDownLatch startLatch = new CountDownLatch(1); final int numberOfUpdatesPerThread = scaledRandomIntBetween(100, 500); final List<Exception> failures = new CopyOnWriteArrayList<>(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); for (int i = 0; i < numberOfThreads; i++) { Runnable r = new Runnable() { @Override public void run() { try { startLatch.await(); for (int i = 0; i < numberOfUpdatesPerThread; i++) { if (i % 100 == 0) { logger.debug("Client [{}] issued [{}] of [{}] requests", Thread.currentThread().getName(), i, numberOfUpdatesPerThread); } if (useBulkApi) { UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()); client().prepareBulk().add(updateRequestBuilder).execute().actionGet(); } else { client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 
1).endObject()) .execute().actionGet(); } } logger.info("Client [{}] issued all [{}] requests.", Thread.currentThread().getName(), numberOfUpdatesPerThread); } catch (InterruptedException e) { // test infrastructure kills long-running tests by interrupting them, thus we handle this case separately logger.warn("Test was forcefully stopped. Client [{}] may still have outstanding requests.", Thread.currentThread().getName()); failures.add(e); Thread.currentThread().interrupt(); } catch (Exception e) { failures.add(e); } finally { latch.countDown(); } } }; Thread updater = new Thread(r); updater.setName("UpdateIT-Client-" + i); updater.start(); } startLatch.countDown(); latch.await(); for (Throwable throwable : failures) { logger.info("Captured failure on concurrent update:", throwable); } assertThat(failures.size(), equalTo(0)); for (int i = 0; i < numberOfUpdatesPerThread; i++) { GetResponse response = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); assertThat(response.getId(), equalTo(Integer.toString(i))); assertThat(response.isExists(), equalTo(true)); assertThat(response.getVersion(), equalTo((long) numberOfThreads)); assertThat(response.getSource().get("field"), equalTo(numberOfThreads)); } } public void testStressUpdateDeleteConcurrency() throws Exception { //We create an index with merging disabled so that deletes don't get merged away assertAcked(prepareCreate("test") .setSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false))); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); final int numberOfThreads = scaledRandomIntBetween(3,5); final int numberOfIdsPerThread = scaledRandomIntBetween(3,10); final int numberOfUpdatesPerId = scaledRandomIntBetween(10,100); final int retryOnConflict = randomIntBetween(0,1); final CountDownLatch latch = new CountDownLatch(numberOfThreads); final CountDownLatch startLatch = new CountDownLatch(1); final List<Throwable> failures = new CopyOnWriteArrayList<>(); final class UpdateThread extends Thread { final Map<Integer,Integer> failedMap = new HashMap<>(); final int numberOfIds; final int maxUpdateRequests = numberOfIdsPerThread*numberOfUpdatesPerId; final int maxDeleteRequests = numberOfIdsPerThread*numberOfUpdatesPerId; private final Semaphore updateRequestsOutstanding = new Semaphore(maxUpdateRequests); private final Semaphore deleteRequestsOutstanding = new Semaphore(maxDeleteRequests); UpdateThread(int numberOfIds) { this.numberOfIds = numberOfIds; } final class UpdateListener implements ActionListener<UpdateResponse> { int id; UpdateListener(int id) { this.id = id; } @Override public void onResponse(UpdateResponse updateResponse) { updateRequestsOutstanding.release(1); } @Override public void onFailure(Exception e) { synchronized (failedMap) { incrementMapValue(id, failedMap); } updateRequestsOutstanding.release(1); } } final class DeleteListener implements ActionListener<DeleteResponse> { int id; DeleteListener(int id) { this.id = id; } @Override public void onResponse(DeleteResponse deleteResponse) { deleteRequestsOutstanding.release(1); } @Override public void onFailure(Exception e) { synchronized (failedMap) { incrementMapValue(id, failedMap); } deleteRequestsOutstanding.release(1); } } @Override public void run(){ try { startLatch.await(); boolean hasWaitedForNoNode = false; for (int j = 0; j < numberOfIds; j++) { for (int k = 0; k < numberOfUpdatesPerId; ++k) { 
updateRequestsOutstanding.acquire(); try { UpdateRequest ur = client().prepareUpdate("test", "type1", Integer.toString(j)) .setScript(fieldIncScript) .setRetryOnConflict(retryOnConflict) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) .request(); client().update(ur, new UpdateListener(j)); } catch (NoNodeAvailableException nne) { updateRequestsOutstanding.release(); synchronized (failedMap) { incrementMapValue(j, failedMap); } if (hasWaitedForNoNode) { throw nne; } logger.warn("Got NoNodeException; waiting for 1 second for things to recover."); hasWaitedForNoNode = true; Thread.sleep(1000); } try { deleteRequestsOutstanding.acquire(); DeleteRequest dr = client().prepareDelete("test", "type1", Integer.toString(j)).request(); client().delete(dr, new DeleteListener(j)); } catch (NoNodeAvailableException nne) { deleteRequestsOutstanding.release(); synchronized (failedMap) { incrementMapValue(j, failedMap); } if (hasWaitedForNoNode) { throw nne; } logger.warn("Got NoNodeException; waiting for 1 second for things to recover."); hasWaitedForNoNode = true; Thread.sleep(1000); //Wait for no-node to clear } } } } catch (Exception e) { logger.error("Something went wrong", e); failures.add(e); } finally { try { waitForOutstandingRequests(TimeValue.timeValueSeconds(60), updateRequestsOutstanding, maxUpdateRequests, "Update"); waitForOutstandingRequests(TimeValue.timeValueSeconds(60), deleteRequestsOutstanding, maxDeleteRequests, "Delete"); } catch (ElasticsearchTimeoutException ete) { failures.add(ete); } latch.countDown(); } } private void incrementMapValue(int j, Map<Integer,Integer> map) { if (!map.containsKey(j)) { map.put(j, 0); } map.put(j, map.get(j) + 1); } private void waitForOutstandingRequests(TimeValue timeOut, Semaphore requestsOutstanding, int maxRequests, String name) { long start = System.currentTimeMillis(); do { long msRemaining = timeOut.getMillis() - (System.currentTimeMillis() - start); logger.info("[{}] going to try to acquire [{}] in [{}]ms; [{}] available to acquire right now", name, maxRequests, msRemaining, requestsOutstanding.availablePermits()); try { if (requestsOutstanding.tryAcquire(maxRequests, msRemaining, TimeUnit.MILLISECONDS)) { return; } /* only stop waiting once all permits were actually acquired; the old code returned unconditionally, so the timeout below could never fire */ } catch (InterruptedException ie) { //Just keep swimming } } while ((System.currentTimeMillis() - start) < timeOut.getMillis()); throw new ElasticsearchTimeoutException("Requests were still outstanding after the timeout [" + timeOut + "] for type [" + name + "]"); } } final List<UpdateThread> threads = new ArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { UpdateThread ut = new UpdateThread(numberOfIdsPerThread); ut.start(); threads.add(ut); } startLatch.countDown(); latch.await(); for (UpdateThread ut : threads){ ut.join(); //Threads should have finished because of the latch.await } //If there are no errors, every request received a response; otherwise the test would have timed out //acquiring the outstanding-request semaphores.
for (Throwable throwable : failures) { logger.info("Captured failure on concurrent update:", throwable); } assertThat(failures.size(), equalTo(0)); //Upsert all the ids one last time to make sure they are available at get time //This means that we add 1 to the expected versions and attempts //All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { client().prepareUpdate("test", "type1", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) .execute().actionGet(); } refresh(); for (int i = 0; i < numberOfIdsPerThread; ++i) { int totalFailures = 0; GetResponse response = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); if (response.isExists()) { assertThat(response.getId(), equalTo(Integer.toString(i))); int expectedVersion = (numberOfThreads * numberOfUpdatesPerId * 2) + 1; for (UpdateThread ut : threads) { if (ut.failedMap.containsKey(i)) { totalFailures += ut.failedMap.get(i); } } expectedVersion -= totalFailures; logger.error("Actual version [{}] Expected version [{}] Total failures [{}]", response.getVersion(), expectedVersion, totalFailures); assertThat(response.getVersion(), equalTo((long) expectedVersion)); assertThat(response.getVersion() + totalFailures, equalTo( (long)((numberOfUpdatesPerId * numberOfThreads * 2) + 1) )); } } } private static String indexOrAlias() { return randomBoolean() ? "test" : "alias"; } }
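/*
 * A worked example of the version bookkeeping asserted at the end of
 * testStressUpdateDeleteConcurrency above: every acknowledged update or delete bumps the document
 * version by one, each of the numberOfThreads threads issues numberOfUpdatesPerId update/delete
 * pairs per id, and the final upsert adds one more, so recorded failures subtract directly from
 * the expected version. The concrete numbers below are arbitrary.
 */
public class ExpectedVersionSketch {
    public static void main(String[] args) {
        int numberOfThreads = 4;
        int numberOfUpdatesPerId = 10;
        int totalFailures = 3; // failures recorded for this id across all threads' failedMap
        // update + delete per iteration = 2 version bumps; +1 for the final upsert.
        int expectedVersion = (numberOfThreads * numberOfUpdatesPerId * 2) + 1 - totalFailures;
        System.out.println(expectedVersion); // 4 * 10 * 2 + 1 - 3 = 78
    }
}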
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.collections; import alluxio.util.CommonUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.Callable; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicBoolean; /** * Tests the concurrent behavior of {@link IndexedSet}. */ public class IndexedSetConcurrencyTest { /** The maximum value for the size field of the test object. */ private static final int MAX_SIZE = 30; /** The duration of each test, in milliseconds. */ private static final int TEST_CASE_DURATION_MS = 5000; /** The minimum number of threads for each task type. */ private static final int MIN_TASKS = 3; /** The maximum number of threads for each task type. */ private static final int MAX_TASKS = 6; /** The maximum number of times one task repeats an add for a single size value in {@link TestInfo}. */ private static final int MAX_REPEAT_TIMES = 6; private IndexedSet<TestInfo> mIndexedSet; private ExecutorService mThreadPool; /** Used to stop concurrent threads. */ private AtomicBoolean mStopThreads; private abstract class ConcurrentTask implements Callable<Void> { private long mCount = 0; private CyclicBarrier mBarrier; public ConcurrentTask(CyclicBarrier barrier) { mBarrier = barrier; } public long getCount() { return mCount; } /** * Runs a single task. * * @return number of items added or deleted */ abstract long runSingleTask(); @Override public Void call() throws BrokenBarrierException, InterruptedException { mBarrier.await(); while (!mStopThreads.get()) { mCount += runSingleTask(); } return null; } } private class ConcurrentAdd extends ConcurrentTask { public ConcurrentAdd(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { return mIndexedSet.add(new TestInfo()) ? 1 : 0; } } private class ConcurrentAddWithCheck extends ConcurrentTask { public ConcurrentAddWithCheck(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { long result = 0; int size = ThreadLocalRandom.current().nextInt(0, MAX_SIZE); for (int i = ThreadLocalRandom.current().nextInt(1, MAX_REPEAT_TIMES + 1); i > 0; i--) { TestInfo instance = new TestInfo(ThreadLocalRandom.current().nextLong(), size); result += (mIndexedSet.add(instance) ?
1 : 0); Assert.assertTrue(mIndexedSet.contains(mIdIndex, instance.getId())); Assert.assertEquals(1, mIndexedSet.getByField(mIdIndex, instance.getId()).size()); } Assert.assertTrue(result <= mIndexedSet.getByField(mSizeIndex, size).size()); return result; } } private class ConcurrentRemove extends ConcurrentTask { public ConcurrentRemove(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { TestInfo info = mIndexedSet.getFirstByField(mSizeIndex, ThreadLocalRandom.current().nextInt(0, MAX_SIZE)); if (info != null) { return mIndexedSet.remove(info) ? 1 : 0; } return 0; } } private class ConcurrentRemoveByField extends ConcurrentTask { public ConcurrentRemoveByField(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { return mIndexedSet .removeByField(mSizeIndex, ThreadLocalRandom.current().nextInt(0, MAX_SIZE)); } } private class ConcurrentRemoveByIterator extends ConcurrentTask { public ConcurrentRemoveByIterator(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { long removed = 0; Iterator<TestInfo> it = mIndexedSet.iterator(); while (it.hasNext()) { it.next(); it.remove(); removed++; } return removed; } } private class ConcurrentClear extends ConcurrentTask { public ConcurrentClear(CyclicBarrier barrier) { super(barrier); } @Override public long runSingleTask() { mIndexedSet.clear(); return 1; } } private final class TestInfo { private long mId; private int mSize; private TestInfo() { this(ThreadLocalRandom.current().nextLong(), ThreadLocalRandom.current().nextInt(0, MAX_SIZE)); } private TestInfo(long id, int size) { mId = id; mSize = size; } public long getId() { return mId; } public int getSize() { return mSize; } } private final IndexDefinition<TestInfo> mIdIndex = new IndexDefinition<TestInfo>(true) { @Override public Object getFieldValue(TestInfo o) { return o.getId(); } }; private final IndexDefinition<TestInfo> mSizeIndex = new IndexDefinition<TestInfo>(false) { @Override public Object getFieldValue(TestInfo o) { return o.getSize(); } }; @Before public void before() throws Exception { mIndexedSet = new IndexedSet<>(mIdIndex, mSizeIndex); mThreadPool = Executors.newCachedThreadPool(); mStopThreads = new AtomicBoolean(false); } @After public void after() { mThreadPool.shutdownNow(); } /** * Verifies the {@link #mIndexedSet} for internal consistency. */ private void verifySet() { Iterator<TestInfo> it = mIndexedSet.iterator(); Set<Long> ids = new HashSet<>(); Set<Integer> sizes = new HashSet<>(); // Verify the size. int expectedCount = 0; while (it.hasNext()) { TestInfo info = it.next(); ids.add(info.getId()); sizes.add(info.getSize()); expectedCount++; } Assert.assertEquals(expectedCount, mIndexedSet.size()); // Verify the size according to the id index. int count = 0; for (Long id : ids) { Set<TestInfo> elements = mIndexedSet.getByField(mIdIndex, id); count += elements.size(); } Assert.assertEquals(expectedCount, count); // Verify the size according to the size index. 
count = 0; for (Integer size : sizes) { Set<TestInfo> elements = mIndexedSet.getByField(mSizeIndex, size); count += elements.size(); } Assert.assertEquals(expectedCount, count); } @Test public void basicConcurrentUpdate() throws Exception { List<Future<?>> futures = new ArrayList<>(); List<ConcurrentTask> addTasks = new ArrayList<>(); List<ConcurrentTask> removeTasks = new ArrayList<>(); int[] tasksNumbers = new int[3]; int totalTasksNumber = 0; // Try to balance adds and removes tasksNumbers[0] = 2 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[0]; // Add random number of each task type. for (int i = 1; i < 3; i++) { tasksNumbers[i] = ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[i]; } CyclicBarrier barrier = new CyclicBarrier(totalTasksNumber); // Add random number of each task type. for (int i = 0; i < tasksNumbers[0]; i++) { // Try to balance adds and removes addTasks.add(new ConcurrentAdd(barrier)); } for (int i = 0; i < tasksNumbers[1]; i++) { removeTasks.add(new ConcurrentRemove(barrier)); } for (int i = 0; i < tasksNumbers[2]; i++) { removeTasks.add(new ConcurrentRemoveByField(barrier)); } for (ConcurrentTask task : addTasks) { futures.add(mThreadPool.submit(task)); } for (ConcurrentTask task : removeTasks) { futures.add(mThreadPool.submit(task)); } CommonUtils.sleepMs(TEST_CASE_DURATION_MS); mStopThreads.set(true); for (Future<?> future : futures) { future.get(); } // Calculate how many elements have been added or removed. long added = 0; for (ConcurrentTask task : addTasks) { added += task.getCount(); } long removed = 0; for (ConcurrentTask task : removeTasks) { removed += task.getCount(); } Assert.assertEquals(mIndexedSet.size(), added - removed); verifySet(); } @Test public void concurrentUpdate() throws Exception { List<Future<?>> futures = new ArrayList<>(); int[] tasksNumbers = new int[5]; int totalTasksNumber = 0; // Try to balance adds and removes tasksNumbers[0] = 4 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[0]; // Add random number of each task type. for (int i = 1; i < 5; i++) { tasksNumbers[i] = ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[i]; } CyclicBarrier barrier = new CyclicBarrier(totalTasksNumber); for (int i = 0; i < tasksNumbers[0]; i++) { futures.add(mThreadPool.submit(new ConcurrentAdd(barrier))); } for (int i = 0; i < tasksNumbers[1]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemove(barrier))); } for (int i = 0; i < tasksNumbers[2]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemoveByField(barrier))); } for (int i = 0; i < tasksNumbers[3]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemoveByIterator(barrier))); } for (int i = 0; i < tasksNumbers[4]; i++) { futures.add(mThreadPool.submit(new ConcurrentClear(barrier))); } CommonUtils.sleepMs(TEST_CASE_DURATION_MS); mStopThreads.set(true); for (Future<?> future : futures) { future.get(); } verifySet(); } @Test public void concurrentAddTest() throws Exception { List<Future<?>> futures = new ArrayList<>(); // Add random number of each task type. 
int tasksNumber = 2 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); CyclicBarrier barrier = new CyclicBarrier(tasksNumber); for (int i = 0; i < tasksNumber; i++) { futures.add(mThreadPool.submit(new ConcurrentAddWithCheck(barrier))); } CommonUtils.sleepMs(TEST_CASE_DURATION_MS); mStopThreads.set(true); for (Future<?> future : futures) { future.get(); } verifySet(); } /** * Uses mSizeIndex as the primary index, to test that a non-unique index behaves correctly as the * primary index. */ @Test public void nonUniqueConcurrentUpdateTest() throws Exception { mIndexedSet = new IndexedSet<>(mSizeIndex, mIdIndex); List<Future<?>> futures = new ArrayList<>(); int[] tasksNumbers = new int[5]; int totalTasksNumber = 0; // Try to balance adds and removes tasksNumbers[0] = 4 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[0]; // Add random number of each task type. for (int i = 1; i < 5; i++) { tasksNumbers[i] = ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1); totalTasksNumber += tasksNumbers[i]; } CyclicBarrier barrier = new CyclicBarrier(totalTasksNumber); for (int i = 0; i < tasksNumbers[0]; i++) { futures.add(mThreadPool.submit(new ConcurrentAdd(barrier))); } for (int i = 0; i < tasksNumbers[1]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemove(barrier))); } for (int i = 0; i < tasksNumbers[2]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemoveByField(barrier))); } for (int i = 0; i < tasksNumbers[3]; i++) { futures.add(mThreadPool.submit(new ConcurrentRemoveByIterator(barrier))); } for (int i = 0; i < tasksNumbers[4]; i++) { futures.add(mThreadPool.submit(new ConcurrentClear(barrier))); } CommonUtils.sleepMs(TEST_CASE_DURATION_MS); mStopThreads.set(true); for (Future<?> future : futures) { future.get(); } verifySet(); } }
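/*
 * A minimal single-threaded sketch of the IndexedSet API exercised by the test above, using the
 * same IndexDefinition shape (unique id index, non-unique size index). The Entry class and the
 * concrete values are illustrative only.
 */
public class IndexedSetUsageSketch {
    private static final class Entry {
        private final long mId;
        private final int mSize;
        Entry(long id, int size) { mId = id; mSize = size; }
    }

    public static void main(String[] args) {
        // Unique index on id, non-unique index on size, mirroring mIdIndex/mSizeIndex above.
        IndexDefinition<Entry> idIndex = new IndexDefinition<Entry>(true) {
            @Override public Object getFieldValue(Entry o) { return o.mId; }
        };
        IndexDefinition<Entry> sizeIndex = new IndexDefinition<Entry>(false) {
            @Override public Object getFieldValue(Entry o) { return o.mSize; }
        };
        IndexedSet<Entry> set = new IndexedSet<>(idIndex, sizeIndex);
        set.add(new Entry(1L, 10));
        set.add(new Entry(2L, 10));
        System.out.println(set.contains(idIndex, 1L));            // true
        System.out.println(set.getByField(sizeIndex, 10).size()); // 2: size index is non-unique
        System.out.println(set.removeByField(sizeIndex, 10));     // 2: both entries removed
        System.out.println(set.size());                           // 0
    }
}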
// // ======================================================================== // Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.client; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketTimeoutException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.net.ssl.SSLEngine; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.eclipse.jetty.client.api.Connection; import org.eclipse.jetty.client.api.Destination; import org.eclipse.jetty.client.api.Request; import org.eclipse.jetty.client.api.Response; import org.eclipse.jetty.client.api.Result; import org.eclipse.jetty.client.http.HttpClientTransportOverHTTP; import org.eclipse.jetty.client.http.HttpDestinationOverHTTP; import org.eclipse.jetty.client.util.BufferingResponseListener; import org.eclipse.jetty.client.util.InputStreamContentProvider; import org.eclipse.jetty.io.ByteBufferPool; import org.eclipse.jetty.io.ClientConnectionFactory; import org.eclipse.jetty.io.EndPoint; import org.eclipse.jetty.io.ssl.SslClientConnectionFactory; import org.eclipse.jetty.io.ssl.SslConnection; import org.eclipse.jetty.server.handler.AbstractHandler; import org.eclipse.jetty.toolchain.test.annotation.Slow; import org.eclipse.jetty.util.FuturePromise; import org.eclipse.jetty.util.IO; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Assume; import org.junit.Ignore; import org.junit.Test; public class HttpClientTimeoutTest extends AbstractHttpClientServerTest { public HttpClientTimeoutTest(SslContextFactory sslContextFactory) { super(sslContextFactory); } @Slow @Test(expected = TimeoutException.class) public void testTimeoutOnFuture() throws Exception { long timeout = 1000; start(new TimeoutHandler(2 * timeout)); client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(timeout, TimeUnit.MILLISECONDS) .send(); } @Slow @Test public void testTimeoutOnListener() throws Exception { long timeout = 1000; start(new TimeoutHandler(2 * timeout)); final CountDownLatch latch = new CountDownLatch(1); Request request = client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(timeout, TimeUnit.MILLISECONDS); request.send(new Response.CompleteListener() { @Override public void onComplete(Result result) { Assert.assertTrue(result.isFailed()); latch.countDown(); } }); Assert.assertTrue(latch.await(3 * timeout, TimeUnit.MILLISECONDS)); } @Slow @Test public void testTimeoutOnQueuedRequest() 
throws Exception { long timeout = 1000; start(new TimeoutHandler(3 * timeout)); // Only one connection so requests get queued client.setMaxConnectionsPerDestination(1); // The first request has a long timeout final CountDownLatch firstLatch = new CountDownLatch(1); Request request = client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(4 * timeout, TimeUnit.MILLISECONDS); request.send(new Response.CompleteListener() { @Override public void onComplete(Result result) { Assert.assertFalse(result.isFailed()); firstLatch.countDown(); } }); // Second request has a short timeout and should fail in the queue final CountDownLatch secondLatch = new CountDownLatch(1); request = client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(timeout, TimeUnit.MILLISECONDS); request.send(new Response.CompleteListener() { @Override public void onComplete(Result result) { Assert.assertTrue(result.isFailed()); secondLatch.countDown(); } }); Assert.assertTrue(secondLatch.await(2 * timeout, TimeUnit.MILLISECONDS)); // The second request must fail before the first request has completed Assert.assertTrue(firstLatch.getCount() > 0); Assert.assertTrue(firstLatch.await(5 * timeout, TimeUnit.MILLISECONDS)); } @Slow @Test public void testTimeoutIsCancelledOnSuccess() throws Exception { long timeout = 1000; start(new TimeoutHandler(timeout)); final CountDownLatch latch = new CountDownLatch(1); final byte[] content = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; Request request = client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .content(new InputStreamContentProvider(new ByteArrayInputStream(content))) .timeout(2 * timeout, TimeUnit.MILLISECONDS); request.send(new BufferingResponseListener() { @Override public void onComplete(Result result) { Assert.assertFalse(result.isFailed()); Assert.assertArrayEquals(content, getContent()); latch.countDown(); } }); Assert.assertTrue(latch.await(3 * timeout, TimeUnit.MILLISECONDS)); TimeUnit.MILLISECONDS.sleep(2 * timeout); Assert.assertNull(request.getAbortCause()); } @Slow @Test public void testTimeoutOnListenerWithExplicitConnection() throws Exception { long timeout = 1000; start(new TimeoutHandler(2 * timeout)); final CountDownLatch latch = new CountDownLatch(1); Destination destination = client.getDestination(scheme, "localhost", connector.getLocalPort()); FuturePromise<Connection> futureConnection = new FuturePromise<>(); destination.newConnection(futureConnection); try (Connection connection = futureConnection.get(5, TimeUnit.SECONDS)) { Request request = client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(timeout, TimeUnit.MILLISECONDS); connection.send(request, new Response.CompleteListener() { @Override public void onComplete(Result result) { Assert.assertTrue(result.isFailed()); latch.countDown(); } }); Assert.assertTrue(latch.await(3 * timeout, TimeUnit.MILLISECONDS)); } } @Slow @Test public void testTimeoutIsCancelledOnSuccessWithExplicitConnection() throws Exception { long timeout = 1000; start(new TimeoutHandler(timeout)); final CountDownLatch latch = new CountDownLatch(1); Destination destination = client.getDestination(scheme, "localhost", connector.getLocalPort()); FuturePromise<Connection> futureConnection = new FuturePromise<>(); destination.newConnection(futureConnection); try (Connection connection = futureConnection.get(5, TimeUnit.SECONDS)) { Request request = client.newRequest(destination.getHost(), destination.getPort()) .scheme(scheme) .timeout(2 * timeout, 
TimeUnit.MILLISECONDS); connection.send(request, new Response.CompleteListener() { @Override public void onComplete(Result result) { Response response = result.getResponse(); Assert.assertEquals(200, response.getStatus()); Assert.assertFalse(result.isFailed()); latch.countDown(); } }); Assert.assertTrue(latch.await(3 * timeout, TimeUnit.MILLISECONDS)); TimeUnit.MILLISECONDS.sleep(2 * timeout); Assert.assertNull(request.getAbortCause()); } } @Test public void testIdleTimeout() throws Throwable { long timeout = 1000; start(new TimeoutHandler(2 * timeout)); client.stop(); final AtomicBoolean sslIdle = new AtomicBoolean(); client = new HttpClient(new HttpClientTransportOverHTTP() { @Override public HttpDestination newHttpDestination(Origin origin) { return new HttpDestinationOverHTTP(getHttpClient(), origin) { @Override protected ClientConnectionFactory newSslClientConnectionFactory(ClientConnectionFactory connectionFactory) { HttpClient client = getHttpClient(); return new SslClientConnectionFactory(client.getSslContextFactory(), client.getByteBufferPool(), client.getExecutor(), connectionFactory) { @Override protected SslConnection newSslConnection(ByteBufferPool byteBufferPool, Executor executor, EndPoint endPoint, SSLEngine engine) { return new SslConnection(byteBufferPool, executor, endPoint, engine) { @Override protected boolean onReadTimeout() { sslIdle.set(true); return super.onReadTimeout(); } }; } }; } }; } }, sslContextFactory); client.setIdleTimeout(timeout); client.start(); try { client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .send(); Assert.fail(); } catch (Exception x) { Assert.assertFalse(sslIdle.get()); Assert.assertThat(x.getCause(), Matchers.instanceOf(TimeoutException.class)); } } @Ignore @Slow @Test public void testConnectTimeoutFailsRequest() throws Exception { String host = "10.255.255.1"; int port = 80; int connectTimeout = 1000; assumeConnectTimeout(host, port, connectTimeout); start(new EmptyServerHandler()); client.stop(); client.setConnectTimeout(connectTimeout); client.start(); final CountDownLatch latch = new CountDownLatch(1); Request request = client.newRequest(host, port); request.scheme(scheme) .send(new Response.CompleteListener() { @Override public void onComplete(Result result) { if (result.isFailed()) latch.countDown(); } }); Assert.assertTrue(latch.await(2 * connectTimeout, TimeUnit.MILLISECONDS)); Assert.assertNotNull(request.getAbortCause()); } @Ignore @Slow @Test public void testConnectTimeoutIsCancelledByShorterTimeout() throws Exception { String host = "10.255.255.1"; int port = 80; int connectTimeout = 2000; assumeConnectTimeout(host, port, connectTimeout); start(new EmptyServerHandler()); client.stop(); client.setConnectTimeout(connectTimeout); client.start(); final AtomicInteger completes = new AtomicInteger(); final CountDownLatch latch = new CountDownLatch(2); Request request = client.newRequest(host, port); request.scheme(scheme) .timeout(connectTimeout / 2, TimeUnit.MILLISECONDS) .send(new Response.CompleteListener() { @Override public void onComplete(Result result) { completes.incrementAndGet(); latch.countDown(); } }); Assert.assertFalse(latch.await(2 * connectTimeout, TimeUnit.MILLISECONDS)); Assert.assertEquals(1, completes.get()); Assert.assertNotNull(request.getAbortCause()); } @Test public void testVeryShortTimeout() throws Exception { start(new EmptyServerHandler()); final CountDownLatch latch = new CountDownLatch(1); client.newRequest("localhost", connector.getLocalPort()) .scheme(scheme) .timeout(1, 
TimeUnit.MILLISECONDS) // Very short timeout .send(new Response.CompleteListener() { @Override public void onComplete(Result result) { latch.countDown(); } }); Assert.assertTrue(latch.await(5, TimeUnit.SECONDS)); } private void assumeConnectTimeout(String host, int port, int connectTimeout) throws IOException { try (Socket socket = new Socket()) { // Try to connect to a private address in the 10.x.y.z range. // These addresses are usually not routed, so an attempt to // connect to them will hang the connection attempt, which is // what we want to simulate in this test. socket.connect(new InetSocketAddress(host, port), connectTimeout); // Abort the test if we can connect. Assume.assumeTrue(false); } catch (SocketTimeoutException x) { // Expected timeout during connect, continue the test. Assume.assumeTrue(true); } } private class TimeoutHandler extends AbstractHandler { private final long timeout; public TimeoutHandler(long timeout) { this.timeout = timeout; } @Override public void handle(String target, org.eclipse.jetty.server.Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { baseRequest.setHandled(true); try { TimeUnit.MILLISECONDS.sleep(timeout); IO.copy(request.getInputStream(), response.getOutputStream()); } catch (InterruptedException x) { throw new ServletException(x); } } } }
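// ---------------------------------------------------------------------------
// The tests above exercise two different clocks: the per-exchange total
// timeout set with Request.timeout(...), which aborts the whole request once
// it expires, and the connection idle timeout set with
// HttpClient.setIdleTimeout(...), which fires only when the connection goes
// quiet. A minimal sketch of the blocking variant follows; the host, port and
// timeout values are placeholders, not taken from the tests above.

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentResponse;

class RequestTimeoutSketch {
  public static void main(String[] args) throws Exception {
    HttpClient client = new HttpClient();
    client.setIdleTimeout(30_000); // connection-level idle timeout, in ms
    client.start();
    try {
      ContentResponse response = client.newRequest("localhost", 8080)
          .timeout(1, TimeUnit.SECONDS) // total budget for the whole exchange
          .send(); // blocking send, as in testTimeoutOnFuture above
      System.out.println(response.getStatus());
    } catch (TimeoutException x) {
      // The exchange did not complete within the timeout and was aborted.
      System.err.println("request timed out: " + x);
    } finally {
      client.stop();
    }
  }
}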
/* * Copyright 2015-2016 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb; import com.mongodb.client.MapReduceIterable; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoIterable; import com.mongodb.client.model.Collation; import com.mongodb.client.model.FindOptions; import com.mongodb.client.model.MapReduceAction; import com.mongodb.operation.MapReduceToCollectionOperation; import com.mongodb.operation.MapReduceWithInlineResultsOperation; import com.mongodb.operation.OperationExecutor; import org.bson.BsonDocument; import org.bson.BsonJavaScript; import org.bson.codecs.configuration.CodecRegistry; import org.bson.conversions.Bson; import java.util.Collection; import java.util.concurrent.TimeUnit; import static com.mongodb.ReadPreference.primary; import static com.mongodb.assertions.Assertions.notNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; class MapReduceIterableImpl<TDocument, TResult> implements MapReduceIterable<TResult> { private final MongoNamespace namespace; private final Class<TDocument> documentClass; private final Class<TResult> resultClass; private final ReadPreference readPreference; private final ReadConcern readConcern; private final CodecRegistry codecRegistry; private final WriteConcern writeConcern; private final OperationExecutor executor; private final String mapFunction; private final String reduceFunction; private boolean inline = true; private String collectionName; private String finalizeFunction; private Bson scope; private Bson filter; private Bson sort; private int limit; private boolean jsMode; private boolean verbose = true; private long maxTimeMS; private MapReduceAction action = MapReduceAction.REPLACE; private String databaseName; private boolean sharded; private boolean nonAtomic; private int batchSize; private Boolean bypassDocumentValidation; private Collation collation; MapReduceIterableImpl(final MongoNamespace namespace, final Class<TDocument> documentClass, final Class<TResult> resultClass, final CodecRegistry codecRegistry, final ReadPreference readPreference, final ReadConcern readConcern, final WriteConcern writeConcern, final OperationExecutor executor, final String mapFunction, final String reduceFunction) { this.namespace = notNull("namespace", namespace); this.documentClass = notNull("documentClass", documentClass); this.resultClass = notNull("resultClass", resultClass); this.codecRegistry = notNull("codecRegistry", codecRegistry); this.readPreference = notNull("readPreference", readPreference); this.readConcern = notNull("readConcern", readConcern); this.writeConcern = notNull("writeConcern", writeConcern); this.executor = notNull("executor", executor); this.mapFunction = notNull("mapFunction", mapFunction); this.reduceFunction = notNull("reduceFunction", reduceFunction); } @Override public void toCollection() { if (inline) { throw new IllegalStateException("The options must specify a non-inline result"); } executor.execute(createMapReduceToCollectionOperation()); } @Override public 
MapReduceIterable<TResult> collectionName(final String collectionName) { this.collectionName = notNull("collectionName", collectionName); this.inline = false; return this; } @Override public MapReduceIterable<TResult> finalizeFunction(final String finalizeFunction) { this.finalizeFunction = finalizeFunction; return this; } @Override public MapReduceIterable<TResult> scope(final Bson scope) { this.scope = scope; return this; } @Override public MapReduceIterable<TResult> sort(final Bson sort) { this.sort = sort; return this; } @Override public MapReduceIterable<TResult> filter(final Bson filter) { this.filter = filter; return this; } @Override public MapReduceIterable<TResult> limit(final int limit) { this.limit = limit; return this; } @Override public MapReduceIterable<TResult> jsMode(final boolean jsMode) { this.jsMode = jsMode; return this; } @Override public MapReduceIterable<TResult> verbose(final boolean verbose) { this.verbose = verbose; return this; } @Override public MapReduceIterable<TResult> maxTime(final long maxTime, final TimeUnit timeUnit) { notNull("timeUnit", timeUnit); this.maxTimeMS = TimeUnit.MILLISECONDS.convert(maxTime, timeUnit); return this; } @Override public MapReduceIterable<TResult> action(final MapReduceAction action) { this.action = action; return this; } @Override public MapReduceIterable<TResult> databaseName(final String databaseName) { this.databaseName = databaseName; return this; } @Override public MapReduceIterable<TResult> sharded(final boolean sharded) { this.sharded = sharded; return this; } @Override public MapReduceIterable<TResult> nonAtomic(final boolean nonAtomic) { this.nonAtomic = nonAtomic; return this; } @Override public MapReduceIterable<TResult> batchSize(final int batchSize) { this.batchSize = batchSize; return this; } @Override public MapReduceIterable<TResult> bypassDocumentValidation(final Boolean bypassDocumentValidation) { this.bypassDocumentValidation = bypassDocumentValidation; return this; } @Override public MapReduceIterable<TResult> collation(final Collation collation) { this.collation = collation; return this; } @Override public MongoCursor<TResult> iterator() { return execute().iterator(); } @Override public TResult first() { return execute().first(); } @Override public <U> MongoIterable<U> map(final Function<TResult, U> mapper) { return new MappingIterable<TResult, U>(this, mapper); } @Override public void forEach(final Block<? super TResult> block) { execute().forEach(block); } @Override public <A extends Collection<? super TResult>> A into(final A target) { return execute().into(target); } MongoIterable<TResult> execute() { if (inline) { MapReduceWithInlineResultsOperation<TResult> operation = new MapReduceWithInlineResultsOperation<TResult>(namespace, new BsonJavaScript(mapFunction), new BsonJavaScript(reduceFunction), codecRegistry.get(resultClass)) .filter(toBsonDocument(filter)) .limit(limit) .maxTime(maxTimeMS, MILLISECONDS) .jsMode(jsMode) .scope(toBsonDocument(scope)) .sort(toBsonDocument(sort)) .verbose(verbose) .readConcern(readConcern) .collation(collation); if (finalizeFunction != null) { operation.finalizeFunction(new BsonJavaScript(finalizeFunction)); } return new OperationIterable<TResult>(operation, readPreference, executor); } else { executor.execute(createMapReduceToCollectionOperation()); String dbName = databaseName != null ? 
databaseName : namespace.getDatabaseName(); return new FindIterableImpl<TDocument, TResult>(new MongoNamespace(dbName, collectionName), documentClass, resultClass, codecRegistry, primary(), readConcern, executor, new BsonDocument(), new FindOptions().collation(collation).batchSize(batchSize)); } } private MapReduceToCollectionOperation createMapReduceToCollectionOperation() { MapReduceToCollectionOperation operation = new MapReduceToCollectionOperation(namespace, new BsonJavaScript(mapFunction), new BsonJavaScript(reduceFunction), collectionName, writeConcern) .filter(toBsonDocument(filter)) .limit(limit) .maxTime(maxTimeMS, MILLISECONDS) .jsMode(jsMode) .scope(toBsonDocument(scope)) .sort(toBsonDocument(sort)) .verbose(verbose) .action(action.getValue()) .nonAtomic(nonAtomic) .sharded(sharded) .databaseName(databaseName) .bypassDocumentValidation(bypassDocumentValidation) .collation(collation); if (finalizeFunction != null) { operation.finalizeFunction(new BsonJavaScript(finalizeFunction)); } return operation; } private BsonDocument toBsonDocument(final Bson document) { return document == null ? null : document.toBsonDocument(documentClass, codecRegistry); } }
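// ---------------------------------------------------------------------------
// execute() above takes one of two paths: a MapReduceWithInlineResultsOperation
// that streams results straight back when no output collection is named, or a
// MapReduceToCollectionOperation followed by a find() on the target collection
// once collectionName(...) has set inline = false. A hedged usage sketch
// against the 3.x driver's public API; the database, collection and JavaScript
// function bodies below are placeholders.

import com.mongodb.MongoClient;
import com.mongodb.client.MapReduceIterable;
import com.mongodb.client.MongoCollection;

import org.bson.Document;

class MapReduceUsageSketch {
  public static void main(String[] args) {
    MongoClient mongoClient = new MongoClient(); // connects to localhost:27017
    MongoCollection<Document> orders =
        mongoClient.getDatabase("test").getCollection("orders");

    // Inline path: no collectionName(), so results are returned directly.
    MapReduceIterable<Document> totals = orders.mapReduce(
        "function() { emit(this.customerId, this.amount); }",
        "function(key, values) { return Array.sum(values); }");
    for (Document doc : totals) {
      System.out.println(doc.toJson());
    }

    // Output-collection path: collectionName(...) switches to the
    // MapReduceToCollectionOperation branch; toCollection() runs the job
    // without opening a cursor on the results.
    orders.mapReduce(
            "function() { emit(this.customerId, 1); }",
            "function(key, values) { return Array.sum(values); }")
        .collectionName("order_counts")
        .toCollection();

    mongoClient.close();
  }
}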
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.olingo4.springboot; import java.util.Map; import javax.annotation.Generated; import org.apache.camel.component.olingo4.internal.Olingo4ApiName; import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.http.HttpHost; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Communicates with OData 4.0 services using the Apache Olingo OData API. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo") @ConfigurationProperties(prefix = "camel.component.olingo4") public class Olingo4ComponentConfiguration extends ComponentConfigurationPropertiesCommon { /** * Whether to enable auto configuration of the olingo4 component. This is * enabled by default. */ private Boolean enabled; /** * To use the shared configuration */ private Olingo4ConfigurationNestedConfiguration configuration; /** * Enable usage of global SSL context parameters. */ private Boolean useGlobalSslContextParameters = false; /** * Whether the component should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities */ private Boolean basicPropertyBinding = false; /** * Whether the producer should be started lazily (on the first message). * Starting lazily allows the CamelContext and routes to start up in * situations where a producer may otherwise fail during startup and cause * the route to fail to start. By deferring the producer's startup, the * failure can instead be handled while messages are routed, via Camel's * routing error handlers. Beware that when the first message is processed, * creating and starting the producer may take a little time and prolong * the total processing time. */ private Boolean lazyStartProducer = false; /** * Allows for bridging the consumer to the Camel routing Error Handler, * which means any exceptions that occur while the consumer is trying to * pick up incoming messages, or the like, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer uses the org.apache.camel.spi.ExceptionHandler to deal with * exceptions, which are logged at WARN or ERROR level and ignored.
*/ private Boolean bridgeErrorHandler = false; public Olingo4ConfigurationNestedConfiguration getConfiguration() { return configuration; } public void setConfiguration( Olingo4ConfigurationNestedConfiguration configuration) { this.configuration = configuration; } public Boolean getUseGlobalSslContextParameters() { return useGlobalSslContextParameters; } public void setUseGlobalSslContextParameters( Boolean useGlobalSslContextParameters) { this.useGlobalSslContextParameters = useGlobalSslContextParameters; } public Boolean getBasicPropertyBinding() { return basicPropertyBinding; } public void setBasicPropertyBinding(Boolean basicPropertyBinding) { this.basicPropertyBinding = basicPropertyBinding; } public Boolean getLazyStartProducer() { return lazyStartProducer; } public void setLazyStartProducer(Boolean lazyStartProducer) { this.lazyStartProducer = lazyStartProducer; } public Boolean getBridgeErrorHandler() { return bridgeErrorHandler; } public void setBridgeErrorHandler(Boolean bridgeErrorHandler) { this.bridgeErrorHandler = bridgeErrorHandler; } public static class Olingo4ConfigurationNestedConfiguration { public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.olingo4.Olingo4Configuration.class; /** * What kind of operation to perform */ private Olingo4ApiName apiName; /** * What sub operation to use for the selected operation */ private String methodName; /** * Target OData service base URI, e.g. * http://services.odata.org/OData/OData.svc */ private String serviceUri; /** * Content-Type header value can be used to specify JSON or XML message * format, defaults to application/json;charset=utf-8 */ private String contentType = "application/json;charset=utf-8"; /** * Custom HTTP headers to inject into every request, this could include * OAuth tokens, etc. */ private Map httpHeaders; /** * HTTP connection creation timeout in milliseconds, defaults to 30,000 * (30 seconds) */ private Integer connectTimeout = 30000; /** * HTTP request timeout in milliseconds, defaults to 30,000 (30 seconds) */ private Integer socketTimeout = 30000; /** * HTTP proxy server configuration */ private HttpHost proxy; /** * To configure security using SSLContextParameters */ private SSLContextParameters sslContextParameters; /** * Custom HTTP async client builder for more complex HTTP client * configuration, overrides connectionTimeout, socketTimeout, proxy and * sslContext. Note that a socketTimeout MUST be specified in the * builder, otherwise OData requests could block indefinitely */ private HttpAsyncClientBuilder httpAsyncClientBuilder; /** * Custom HTTP client builder for more complex HTTP client * configuration, overrides connectionTimeout, socketTimeout, proxy and * sslContext. Note that a socketTimeout MUST be specified in the * builder, otherwise OData requests could block indefinitely */ private HttpClientBuilder httpClientBuilder; /** * Set this to true to filter out results that have already been * communicated by this component. */ private Boolean filterAlreadySeen = false; /** * For endpoints that return an array or collection, a consumer endpoint * will map every element to distinct messages, unless splitResult is * set to false. 
*/ private Boolean splitResult = true; public Olingo4ApiName getApiName() { return apiName; } public void setApiName(Olingo4ApiName apiName) { this.apiName = apiName; } public String getMethodName() { return methodName; } public void setMethodName(String methodName) { this.methodName = methodName; } public String getServiceUri() { return serviceUri; } public void setServiceUri(String serviceUri) { this.serviceUri = serviceUri; } public String getContentType() { return contentType; } public void setContentType(String contentType) { this.contentType = contentType; } public Map getHttpHeaders() { return httpHeaders; } public void setHttpHeaders(Map httpHeaders) { this.httpHeaders = httpHeaders; } public Integer getConnectTimeout() { return connectTimeout; } public void setConnectTimeout(Integer connectTimeout) { this.connectTimeout = connectTimeout; } public Integer getSocketTimeout() { return socketTimeout; } public void setSocketTimeout(Integer socketTimeout) { this.socketTimeout = socketTimeout; } public HttpHost getProxy() { return proxy; } public void setProxy(HttpHost proxy) { this.proxy = proxy; } public SSLContextParameters getSslContextParameters() { return sslContextParameters; } public void setSslContextParameters( SSLContextParameters sslContextParameters) { this.sslContextParameters = sslContextParameters; } public HttpAsyncClientBuilder getHttpAsyncClientBuilder() { return httpAsyncClientBuilder; } public void setHttpAsyncClientBuilder( HttpAsyncClientBuilder httpAsyncClientBuilder) { this.httpAsyncClientBuilder = httpAsyncClientBuilder; } public HttpClientBuilder getHttpClientBuilder() { return httpClientBuilder; } public void setHttpClientBuilder(HttpClientBuilder httpClientBuilder) { this.httpClientBuilder = httpClientBuilder; } public Boolean getFilterAlreadySeen() { return filterAlreadySeen; } public void setFilterAlreadySeen(Boolean filterAlreadySeen) { this.filterAlreadySeen = filterAlreadySeen; } public Boolean getSplitResult() { return splitResult; } public void setSplitResult(Boolean splitResult) { this.splitResult = splitResult; } } }
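// ---------------------------------------------------------------------------
// How this class is consumed: Spring Boot's relaxed binding maps each field
// above (including the fields of the nested
// Olingo4ConfigurationNestedConfiguration reached through setConfiguration)
// onto kebab-case keys under the "camel.component.olingo4" prefix declared in
// @ConfigurationProperties. An illustrative application.properties fragment,
// reusing the example service URI from the serviceUri Javadoc above:
//
//   camel.component.olingo4.enabled=true
//   camel.component.olingo4.lazy-start-producer=false
//   camel.component.olingo4.configuration.service-uri=http://services.odata.org/OData/OData.svc
//   camel.component.olingo4.configuration.content-type=application/json;charset=utf-8
//   camel.component.olingo4.configuration.connect-timeout=30000
//   camel.component.olingo4.configuration.socket-timeout=30000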
package hex.kmeans; import hex.*; import hex.util.LinearAlgebraUtils; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import water.*; import water.exceptions.H2OModelBuilderIllegalArgumentException; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.Vec; import water.util.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import static hex.genmodel.GenModel.Kmeans_preprocessData; /** * Scalable K-Means++ (KMeans||)<br> * http://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf<br> * http://www.youtube.com/watch?v=cigXAxV3XcY */ public class KMeans extends ClusteringModelBuilder<KMeansModel,KMeansModel.KMeansParameters,KMeansModel.KMeansOutput> { @Override public ToEigenVec getToEigenVec() { return LinearAlgebraUtils.toEigen; } // Convergence tolerance final static private double TOLERANCE = 1e-4; @Override public ModelCategory[] can_build() { return new ModelCategory[]{ ModelCategory.Clustering }; } @Override public boolean havePojo() { return true; } @Override public boolean haveMojo() { return true; } public enum Initialization { Random, PlusPlus, Furthest, User } /** Start the KMeans training Job on an F/J thread. */ @Override protected KMeansDriver trainModelImpl() { return new KMeansDriver(); } // Called from an http request public KMeans( KMeansModel.KMeansParameters parms ) { super(parms ); init(false); } public KMeans( KMeansModel.KMeansParameters parms, Job job) { super(parms,job); init(false); } public KMeans(boolean startup_once) { super(new KMeansModel.KMeansParameters(),startup_once); } @Override protected void checkMemoryFootPrint_impl() { long mem_usage = 8 /*doubles*/ * _parms._k * _train.numCols() * (_parms._standardize ? 2 : 1); long max_mem = H2O.SELF._heartbeat.get_free_mem(); if (mem_usage > max_mem) { String msg = "Centroids won't fit in the driver node's memory (" + PrettyPrint.bytes(mem_usage) + " > " + PrettyPrint.bytes(max_mem) + ") - try reducing the number of columns and/or the number of categorical factors."; error("_train", msg); } } /** Initialize the ModelBuilder, validating all arguments and preparing the * training frame. This call is expected to be overridden in the subclasses * and each subclass will start with "super.init();". * * Validate K, max_iterations and the number of rows. 
*/ @Override public void init(boolean expensive) { super.init(expensive); if(expensive) if(_parms._fold_column != null) _train.remove(_parms._fold_column); if( _parms._max_iterations <= 0 || _parms._max_iterations > 1e6) error("_max_iterations", " max_iterations must be between 1 and 1e6"); if (_train == null) return; if (_parms._init == Initialization.User && _parms._user_points == null) error("_user_y","Must specify initial cluster centers"); if (_parms._user_points != null) { // Check dimensions of user-specified centers Frame user_points = _parms._user_points.get(); if (user_points == null) error("_user_y", "User-specified points do not refer to a valid frame"); else if (user_points.numCols() != _train.numCols() - numSpecialCols()) error("_user_y", "The user-specified points must have the same number of columns (" + (_train.numCols() - numSpecialCols()) + ") as the training observations"); else if( user_points.numRows() != _parms._k) error("_user_y", "The number of rows in the user-specified points is not equal to k = " + _parms._k); } if (_parms._estimate_k) { if (_parms._user_points!=null) error("_estimate_k", "Cannot estimate k if user_points are provided."); info("_seed", "seed is ignored when estimate_k is enabled."); info("_init", "Initialization scheme is ignored when estimate_k is enabled - algorithm is deterministic."); if (expensive) { boolean numeric = false; for (Vec v : _train.vecs()) { if (v.isNumeric()) { numeric = true; break; } } if (!numeric) { error("_estimate_k", "Cannot estimate k if data has no numeric columns."); } } } if (expensive && error_count() == 0) checkMemoryFootPrint(); } // ---------------------- private final class KMeansDriver extends Driver { private String[][] _isCats; // Categorical columns // Initialize cluster centers double[][] initial_centers(KMeansModel model, final Vec[] vecs, final double[] means, final double[] mults, final int[] modes, int k) { // Categoricals use a different distance metric than numeric columns. model._output._categorical_column_count=0; _isCats = new String[vecs.length][]; for( int v=0; v<vecs.length; v++ ) { _isCats[v] = vecs[v].isCategorical() ? 
new String[0] : null; if (_isCats[v] != null) model._output._categorical_column_count++; } Random rand = water.util.RandomUtils.getRNG(_parms._seed-1); double centers[][]; // Cluster centers if( null != _parms._user_points ) { // User-specified starting points Frame user_points = _parms._user_points.get(); int numCenters = (int)user_points.numRows(); int numCols = model._output.nfeatures(); centers = new double[numCenters][numCols]; Vec[] centersVecs = user_points.vecs(); // Get the centers and standardize them if requested for (int r=0; r<numCenters; r++) { for (int c=0; c<numCols; c++){ centers[r][c] = centersVecs[c].at(r); centers[r][c] = Kmeans_preprocessData(centers[r][c], c, means, mults, modes); } } } else { // Random, Furthest, or PlusPlus initialization if (_parms._init == Initialization.Random) { // Initialize all cluster centers to random rows centers = new double[k][model._output.nfeatures()]; for (double[] center : centers) randomRow(vecs, rand, center, means, mults, modes); } else { centers = new double[1][model._output.nfeatures()]; // Initialize first cluster center to random row randomRow(vecs, rand, centers[0], means, mults, modes); model._output._iterations = 0; while (model._output._iterations < 5) { // Sum squares distances to cluster center SumSqr sqr = new SumSqr(centers, means, mults, modes, _isCats).doAll(vecs); // Sample with probability inverse to square distance Sampler sampler = new Sampler(centers, means, mults, modes, _isCats, sqr._sqr, k * 3, _parms.getOrMakeRealSeed(), hasWeightCol()).doAll(vecs); centers = ArrayUtils.append(centers, sampler._sampled); // Fill in sample centers into the model model._output._centers_raw = destandardize(centers, _isCats, means, mults); model._output._tot_withinss = sqr._sqr / _train.numRows(); model._output._iterations++; // One iteration done model.update(_job); // Make early version of model visible, but don't update progress using update(1) if (stop_requested()) { if (timeout()) warn("_max_runtime_secs reached.", "KMeans exited before finishing all iterations."); break; // Stopped/cancelled } } // Recluster down to k cluster centers centers = recluster(centers, rand, k, _parms._init, _isCats); model._output._iterations = 0; // Reset iteration count } } assert(centers.length == k); return centers; } // Number of reinitialization attempts for preventing empty clusters transient private int _reinit_attempts; // Handle the case where some centers go dry. Rescue only 1 cluster // per iteration ('cause we only tracked the 1 worst row) boolean cleanupBadClusters( LloydsIterationTask task, final Vec[] vecs, final double[][] centers, final double[] means, final double[] mults, final int[] modes ) { // Find any bad clusters int clu; for( clu=0; clu<centers.length; clu++ ) if( task._size[clu] == 0 ) break; if( clu == centers.length ) return false; // No bad clusters long row = task._worst_row; Log.warn("KMeans: Re-initializing cluster " + clu + " to row " + row); data(centers[clu] = task._cMeans[clu], vecs, row, means, mults, modes); task._size[clu] = 1; //FIXME: PUBDEV-871 Some other cluster had their membership count reduced by one! (which one?) // Find any MORE bad clusters; we only fixed the first one for( clu=0; clu<centers.length; clu++ ) if( task._size[clu] == 0 ) break; if( clu == centers.length ) return false; // No MORE bad clusters // If we see 2 or more bad rows, just re-run Lloyds to get the // next-worst row. 
We don't count this as an iteration, because // we're not really adjusting the centers, we're trying to get // some centers *at-all*. Log.warn("KMeans: Re-running Lloyds to re-init another cluster"); if (_reinit_attempts++ < centers.length) { return true; // Rerun Lloyds, and assign points to centroids } else { _reinit_attempts = 0; return false; } } // Compute all interesting KMeans stats (errors & variances of clusters, // etc). Return new centers. double[][] computeStatsFillModel(LloydsIterationTask task, KMeansModel model, final Vec[] vecs, final double[] means, final double[] mults, final int[] modes, int k) { // Fill in the model based on original destandardized centers if (model._parms._standardize) { model._output._centers_std_raw = task._cMeans; } model._output._centers_raw = destandardize(task._cMeans, _isCats, means, mults); model._output._size = task._size; model._output._withinss = task._cSqr; double ssq = 0; // sum squared error for( int i=0; i<k; i++ ) ssq += model._output._withinss[i]; // sum squared error all clusters model._output._tot_withinss = ssq; // Sum-of-square distance from grand mean if(k == 1) { model._output._totss = model._output._tot_withinss; } else { // If data already standardized, grand mean is just the origin TotSS totss = new TotSS(means,mults,modes, train().domains(), train().cardinality()).doAll(vecs); model._output._totss = totss._tss; } model._output._betweenss = model._output._totss - model._output._tot_withinss; // MSE between-cluster model._output._iterations++; model._output._history_withinss = ArrayUtils.copyAndFillOf( model._output._history_withinss, model._output._history_withinss.length+1, model._output._tot_withinss); model._output._k = ArrayUtils.copyAndFillOf(model._output._k, model._output._k.length+1, k); model._output._training_time_ms = ArrayUtils.copyAndFillOf(model._output._training_time_ms, model._output._training_time_ms.length+1, System.currentTimeMillis()); model._output._reassigned_count = ArrayUtils.copyAndFillOf(model._output._reassigned_count, model._output._reassigned_count.length+1, task._reassigned_count); // Two small TwoDimTables - cheap model._output._model_summary = createModelSummaryTable(model._output); model._output._scoring_history = createScoringHistoryTable(model._output); // Take the cluster stats from the model, and assemble them into a model metrics object model._output._training_metrics = makeTrainingMetrics(model); return task._cMeans; // New centers } // Main worker thread @Override public void computeImpl() { KMeansModel model = null; Key bestOutputKey = Key.make(); try { init(true); // Do lock even before checking the errors, since this block is finalized by unlock // (not the best solution, but the code is more readable) // Something goes wrong if( error_count() > 0 ) throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(KMeans.this); // The model to be built // Set fold_column to null and will be added back into model parameter after String fold_column = _parms._fold_column; _parms._fold_column = null; model = new KMeansModel(dest(), _parms, new KMeansModel.KMeansOutput(KMeans.this)); model.delete_and_lock(_job); int startK = _parms._estimate_k ? 1 : _parms._k; // final Vec vecs[] = _train.vecs(); // mults & means for standardization final double[] means = _train.means(); // means are used to impute NAs final double[] mults = _parms._standardize ? _train.mults() : null; final int [] impute_cat = new int[vecs.length]; for(int i = 0; i < vecs.length; i++) impute_cat[i] = vecs[i].isNumeric() ? 
-1 : DataInfo.imputeCat(vecs[i],true); model._output._normSub = means; model._output._normMul = mults; model._output._mode = impute_cat; // Initialize cluster centers and standardize if requested double[][] centers = initial_centers(model,vecs,means,mults,impute_cat, startK); if( centers==null ) return; // Stopped/cancelled during center-finding boolean work_unit_iter = !_parms._estimate_k; // --- // Run the main KMeans Clustering loop // Stop after enough iterations or reassigned_count < TOLERANCE * num_rows double sum_squares = 0; final double rel_improvement_cutoff = Math.min(0.02 + 10. / _train.numRows() + 2.5 / Math.pow(model._output.nfeatures(), 2), 0.8); if (_parms._estimate_k) Log.info("Cutoff for relative improvement in within_cluster_sum_of_squares: " + rel_improvement_cutoff); Vec[] vecs2 = Arrays.copyOf(vecs, vecs.length+1); vecs2[vecs2.length-1] = vecs2[0].makeCon(-1); for (int k = startK; k <= _parms._k; ++k) { Log.info("Running Lloyds iteration for " + k + " centroids."); model._output._iterations = 0; // Loop ends only when iterations > max_iterations with strict inequality double[][] lo=null, hi=null; boolean stop = false; do { //Lloyds algorithm assert(centers.length == k); LloydsIterationTask task = new LloydsIterationTask(centers, means, mults, impute_cat, _isCats, k, hasWeightCol()).doAll(vecs2); //1 PASS OVER THE DATA // Pick the max categorical level for cluster center max_cats(task._cMeans, task._cats, _isCats); // Handle the case where some centers go dry. Rescue only 1 cluster // per iteration ('cause we only tracked the 1 worst row) if( !_parms._estimate_k && cleanupBadClusters(task,vecs,centers,means,mults,impute_cat) ) continue; // Compute model stats; update standardized cluster centers centers = computeStatsFillModel(task, model, vecs, means, mults, impute_cat, k); if (model._parms._score_each_iteration) Log.info(model._output._model_summary); lo = task._lo; hi = task._hi; if (work_unit_iter) { model.update(_job); // Update model in K/V store _job.update(1); //1 more Lloyds iteration } stop = (task._reassigned_count < Math.max(1,train().numRows()*TOLERANCE) || model._output._iterations >= _parms._max_iterations || stop_requested()); if (stop) { if (model._output._iterations < _parms._max_iterations) Log.info("Lloyds converged after " + model._output._iterations + " iterations."); else Log.info("Lloyds stopped after " + model._output._iterations + " iterations."); } } while (!stop); double sum_squares_now = model._output._tot_withinss; double rel_improvement; if (sum_squares==0) { rel_improvement = 1; } else { rel_improvement = (sum_squares - sum_squares_now) / sum_squares; } Log.info("Relative improvement in total withinss: " + rel_improvement); sum_squares = sum_squares_now; if (_parms._estimate_k && k > 1) { boolean outerConverged = rel_improvement < rel_improvement_cutoff; if (outerConverged) { KMeansModel.KMeansOutput best = DKV.getGet(bestOutputKey); model._output = best; Log.info("Converged. 
Retrieving the best model with k=" + model._output._k[model._output._k.length-1]); break; } } if (!work_unit_iter) { DKV.put(bestOutputKey, IcedUtils.deepCopy(model._output)); //store a clone to avoid sharing the state between DKV and here model.update(_job); // Update model in K/V store _job.update(1); //1 more round for auto-clustering } if (lo != null && hi != null && _parms._estimate_k) centers = splitLargestCluster(centers, lo, hi, means, mults, impute_cat, vecs2, k); } //k-finder vecs2[vecs2.length-1].remove(); // Create metrics by scoring on training set otherwise scores are based on last Lloyd iteration model.score(_train).delete(); model._output._training_metrics = ModelMetrics.getFromDKV(model,_train); Log.info(model._output._model_summary); Log.info(model._output._scoring_history); Log.info(((ModelMetricsClustering)model._output._training_metrics).createCentroidStatsTable().toString()); // At the end: validation scoring (no need to gather scoring history) if (_valid != null) { model.score(_parms.valid()).delete(); //this appends a ModelMetrics on the validation set model._output._validation_metrics = ModelMetrics.getFromDKV(model,_parms.valid()); } model._parms._fold_column = fold_column; model.update(_job); // Update model in K/V store } finally { if( model != null ) model.unlock(_job); DKV.remove(bestOutputKey); } } double[][] splitLargestCluster(double[][] centers, double[][] lo, double[][] hi, double[] means, double[] mults, int[] impute_cat, Vec[] vecs2, int k) { double[][] newCenters = Arrays.copyOf(centers, centers.length + 1); for (int i = 0; i < centers.length; ++i) newCenters[i] = centers[i].clone(); double maxRange=0; int clusterToSplit=0; int dimToSplit=0; for (int i = 0; i < centers.length; ++i) { double[] range = new double[hi[i].length]; for( int col=0; col<hi[i].length; col++ ) { if (_isCats[col]!=null) continue; // can't split a cluster along categorical direction range[col] = hi[i][col] - lo[i][col]; if ((float)range[col] > (float)maxRange) { //break ties clusterToSplit = i; dimToSplit = col; maxRange = range[col]; } } // Log.info("Range for cluster " + i + ": " + Arrays.toString(range)); } // start out new centroid as a copy of the one to split assert (_isCats[dimToSplit] == null); double splitPoint = newCenters[clusterToSplit][dimToSplit]; // Log.info("Splitting cluster " + clusterToSplit + " in half in dimension " + dimToSplit + " at splitpoint: " + splitPoint); // compute the centroids of the two sub-clusters SplitTask task = new SplitTask(newCenters, means, mults, impute_cat, _isCats, k+1, hasWeightCol(), clusterToSplit, dimToSplit, splitPoint).doAll(vecs2); // Log.info("Splitting: " + Arrays.toString(newCenters[clusterToSplit])); newCenters[clusterToSplit] = task._cMeans[clusterToSplit].clone(); // Log.info("Into One: " + Arrays.toString(newCenters[clusterToSplit])); newCenters[newCenters.length-1] = task._cMeans[newCenters.length-1].clone(); // Log.info(" Two: " + Arrays.toString(newCenters[newCenters.length-1])); return newCenters; } private TwoDimTable createModelSummaryTable(KMeansModel.KMeansOutput output) { List<String> colHeaders = new ArrayList<>(); List<String> colTypes = new ArrayList<>(); List<String> colFormat = new ArrayList<>(); colHeaders.add("Number of Rows"); colTypes.add("long"); colFormat.add("%d"); colHeaders.add("Number of Clusters"); colTypes.add("long"); colFormat.add("%d"); colHeaders.add("Number of Categorical Columns"); colTypes.add("long"); colFormat.add("%d"); colHeaders.add("Number of Iterations"); colTypes.add("long"); 
colFormat.add("%d"); colHeaders.add("Within Cluster Sum of Squares"); colTypes.add("double"); colFormat.add("%.5f"); colHeaders.add("Total Sum of Squares"); colTypes.add("double"); colFormat.add("%.5f"); colHeaders.add("Between Cluster Sum of Squares"); colTypes.add("double"); colFormat.add("%.5f"); final int rows = 1; TwoDimTable table = new TwoDimTable( "Model Summary", null, new String[rows], colHeaders.toArray(new String[0]), colTypes.toArray(new String[0]), colFormat.toArray(new String[0]), ""); int row = 0; int col = 0; table.set(row, col++, Math.round(_train.numRows() * (hasWeightCol() ? _train.lastVec().mean() : 1))); table.set(row, col++, output._centers_raw.length); table.set(row, col++, output._categorical_column_count); table.set(row, col++, output._k.length-1); table.set(row, col++, output._tot_withinss); table.set(row, col++, output._totss); table.set(row, col++, output._betweenss); return table; } private TwoDimTable createScoringHistoryTable(KMeansModel.KMeansOutput output) { List<String> colHeaders = new ArrayList<>(); List<String> colTypes = new ArrayList<>(); List<String> colFormat = new ArrayList<>(); colHeaders.add("Timestamp"); colTypes.add("string"); colFormat.add("%s"); colHeaders.add("Duration"); colTypes.add("string"); colFormat.add("%s"); colHeaders.add("Iterations"); colTypes.add("long"); colFormat.add("%d"); if (_parms._estimate_k) { colHeaders.add("Number of Clusters"); colTypes.add("long"); colFormat.add("%d"); } colHeaders.add("Number of Reassigned Observations"); colTypes.add("long"); colFormat.add("%d"); colHeaders.add("Within Cluster Sum Of Squares"); colTypes.add("double"); colFormat.add("%.5f"); final int rows = output._history_withinss.length; TwoDimTable table = new TwoDimTable( "Scoring History", null, new String[rows], colHeaders.toArray(new String[0]), colTypes.toArray(new String[0]), colFormat.toArray(new String[0]), ""); int row = 0; for( int i = 0; i<rows; i++ ) { int col = 0; assert(row < table.getRowDim()); assert(col < table.getColDim()); DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"); table.set(row, col++, fmt.print(output._training_time_ms[i])); table.set(row, col++, PrettyPrint.msecs(output._training_time_ms[i]-_job.start_time(), true)); table.set(row, col++, i); if (_parms._estimate_k) table.set(row, col++, output._k[i]); table.set(row, col++, output._reassigned_count[i]); table.set(row, col++, output._history_withinss[i]); row++; } return table; } } // ------------------------------------------------------------------------- // Initial sum-of-square-distance to nearest cluster center private static class TotSS extends MRTask<TotSS> { // IN final double[] _means, _mults; final int[] _modes; final String[][] _isCats; final int[] _card; // OUT double _tss; double[] _gc; // Grand center (mean of cols) TotSS(double[] means, double[] mults, int[] modes, String[][] isCats, int[] card) { _means = means; _mults = mults; _modes = modes; _tss = 0; _isCats = isCats; _card = card; // Mean of numeric col is zero when standardized _gc = mults!=null ? 
new double[means.length] : Arrays.copyOf(means, means.length); for(int i=0; i<means.length; i++) { if(isCats[i] != null) _gc[i] = _modes[i]; } } @Override public void map(Chunk[] cs) { for( int row = 0; row < cs[0]._len; row++ ) { double[] values = new double[cs.length]; // fetch the data - using consistent NA and categorical data handling (same as for training) data(values, cs, row, _means, _mults, _modes); // compute the distance from the (standardized) cluster centroids _tss += hex.genmodel.GenModel.KMeans_distance(_gc, values, _isCats); } } @Override public void reduce(TotSS other) { _tss += other._tss; } } // ------------------------------------------------------------------------- // Initial sum-of-square-distance to nearest cluster center private static class SumSqr extends MRTask<SumSqr> { // IN double[][] _centers; double[] _means, _mults; // Standardization int[] _modes; // Imputation of missing categoricals final String[][] _isCats; // OUT double _sqr; SumSqr( double[][] centers, double[] means, double[] mults, int[] modes, String[][] isCats ) { _centers = centers; _means = means; _mults = mults; _modes = modes; _isCats = isCats; } @Override public void map(Chunk[] cs) { double[] values = new double[cs.length]; ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { data(values, cs, row, _means, _mults, _modes); _sqr += minSqr(_centers, values, _isCats, cd); } _means = _mults = null; _modes = null; _centers = null; } @Override public void reduce(SumSqr other) { _sqr += other._sqr; } } // ------------------------------------------------------------------------- // Sample rows with increasing probability the farther they are from any // cluster center. private static class Sampler extends MRTask<Sampler> { // IN double[][] _centers; double[] _means, _mults; // Standardization int[] _modes; // Imputation of missing categoricals final String[][] _isCats; final double _sqr; // Min-square-error final double _probability; // Odds to select this point final long _seed; boolean _hasWeight; // OUT double[][] _sampled; // New cluster centers Sampler( double[][] centers, double[] means, double[] mults, int[] modes, String[][] isCats, double sqr, double prob, long seed, boolean hasWeight ) { _centers = centers; _means = means; _mults = mults; _modes = modes; _isCats = isCats; _sqr = sqr; _probability = prob; _seed = seed; _hasWeight = hasWeight; } @Override public void map(Chunk[] cs) { int N = cs.length - (_hasWeight?1:0); double[] values = new double[N]; ArrayList<double[]> list = new ArrayList<>(); Random rand = RandomUtils.getRNG(0); ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { rand.setSeed(_seed + cs[0].start()+row); data(values, cs, row, _means, _mults, _modes); double sqr = minSqr(_centers, values, _isCats, cd); if( _probability * sqr > rand.nextDouble() * _sqr ) list.add(values.clone()); } _sampled = new double[list.size()][]; list.toArray(_sampled); _centers = null; _means = _mults = null; _modes = null; } @Override public void reduce(Sampler other) { _sampled = ArrayUtils.append(_sampled, other._sampled); } } // --------------------------------------- // A Lloyd's pass: // Find nearest cluster center for every point // Compute new mean/center & variance & rows for each cluster // Compute distance between clusters // Compute total sqr distance private static class LloydsIterationTask extends MRTask<LloydsIterationTask> { // IN double[][] _centers; double[] _means, _mults; // Standardization int[] _modes; // Imputation of 
missing categoricals final int _k; final String[][] _isCats; boolean _hasWeight; // OUT double[][] _lo, _hi; // Bounding box double _reassigned_count; double[][] _cMeans; // Means for each cluster long[/*k*/][/*features*/][/*nfactors*/] _cats; // Histogram of cat levels double[] _cSqr; // Sum of squares for each cluster long[] _size; // Number of rows in each cluster long _worst_row; // Row with max err double _worst_err; // Max-err-row's max-err LloydsIterationTask(double[][] centers, double[] means, double[] mults, int[] modes, String[][] isCats, int k, boolean hasWeight ) { _centers = centers; _means = means; _mults = mults; _modes = modes; _isCats = isCats; _k = k; _hasWeight = hasWeight; } @Override public void map(Chunk[] cs) { int N = cs.length - (_hasWeight ? 1:0) - 1 /*clusterassignment*/; assert _centers[0].length==N; _lo = new double[_k][N]; for( int clu=0; clu< _k; clu++ ) Arrays.fill(_lo[clu], Double.MAX_VALUE); _hi = new double[_k][N]; for( int clu=0; clu< _k; clu++ ) Arrays.fill(_hi[clu], -Double.MAX_VALUE); _cMeans = new double[_k][N]; _cSqr = new double[_k]; _size = new long[_k]; // Space for cat histograms _cats = new long[_k][N][]; for( int clu=0; clu< _k; clu++ ) for( int col=0; col<N; col++ ) _cats[clu][col] = _isCats[col]==null ? null : new long[cs[col].vec().cardinality()]; _worst_err = 0; Chunk assignment = cs[cs.length-1]; // Find closest cluster center for each row double[] values = new double[N]; // Temp data to hold row as doubles ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { double weight = _hasWeight ? cs[N].atd(row) : 1; if (weight == 0) continue; //skip holdout rows assert(weight == 1); //K-Means only works for weight 1 (or weight 0 for holdout) data(values, cs, row, _means, _mults, _modes); // Load row as doubles closest(_centers, values, _isCats, cd); // Find closest cluster center if (cd._cluster != assignment.at8(row)) { _reassigned_count+=weight; assignment.set(row, cd._cluster); } for( int clu=0; clu< _k; clu++ ) { for( int col=0; col<N; col++ ) { if (cd._cluster == clu) { _lo[clu][col] = Math.min(values[col], _lo[clu][col]); _hi[clu][col] = Math.max(values[col], _hi[clu][col]); } } } int clu = cd._cluster; assert clu != -1; // No broken rows _cSqr[clu] += cd._dist; // Add values and increment counter for chosen cluster for( int col = 0; col < N; col++ ) if( _isCats[col] != null ) _cats[clu][col][(int)values[col]]++; // Histogram the cats else _cMeans[clu][col] += values[col]; // Sum the column centers _size[clu]++; // Track worst row if( cd._dist > _worst_err) { _worst_err = cd._dist; _worst_row = cs[0].start()+row; } } // Scale back down to local mean for( int clu = 0; clu < _k; clu++ ) if( _size[clu] != 0 ) ArrayUtils.div(_cMeans[clu], _size[clu]); _centers = null; _means = _mults = null; _modes = null; } @Override public void reduce(LloydsIterationTask mr) { _reassigned_count += mr._reassigned_count; for( int clu = 0; clu < _k; clu++ ) { long ra = _size[clu]; long rb = mr._size[clu]; double[] ma = _cMeans[clu]; double[] mb = mr._cMeans[clu]; for( int c = 0; c < ma.length; c++ ) // Recursive mean if( ra+rb > 0 ) ma[c] = (ma[c] * ra + mb[c] * rb) / (ra + rb); } ArrayUtils.add(_cats, mr._cats); ArrayUtils.add(_cSqr, mr._cSqr); ArrayUtils.add(_size, mr._size); for( int clu=0; clu< _k; clu++ ) { for( int col=0; col<_lo[clu].length; col++ ) { _lo[clu][col] = Math.min(mr._lo[clu][col], _lo[clu][col]); _hi[clu][col] = Math.max(mr._hi[clu][col], _hi[clu][col]); } } // track global worst-row if( _worst_err < mr._worst_err) 
{ _worst_err = mr._worst_err; _worst_row = mr._worst_row; } } } // A pair result: nearest cluster center and the square distance private static final class ClusterDist { int _cluster; double _dist; } private static double minSqr(double[][] centers, double[] point, String[][] isCats, ClusterDist cd) { return closest(centers, point, isCats, cd, centers.length)._dist; } private static double minSqr(double[][] centers, double[] point, String[][] isCats, ClusterDist cd, int count) { return closest(centers,point,isCats,cd,count)._dist; } private static ClusterDist closest(double[][] centers, double[] point, String[][] isCats, ClusterDist cd) { return closest(centers, point, isCats, cd, centers.length); } /** Return both nearest of N cluster center/centroids, and the square-distance. */ private static ClusterDist closest(double[][] centers, double[] point, String[][] isCats, ClusterDist cd, int count) { int min = -1; double minSqr = Double.MAX_VALUE; for( int cluster = 0; cluster < count; cluster++ ) { double sqr = hex.genmodel.GenModel.KMeans_distance(centers[cluster],point,isCats); if( sqr < minSqr ) { // Record nearest cluster min = cluster; minSqr = sqr; } } cd._cluster = min; // Record nearest cluster cd._dist = minSqr; // Record square-distance return cd; // Return for flow-coding } // KMeans++ re-clustering private static double[][] recluster(double[][] points, Random rand, int N, Initialization init, String[][] isCats) { double[][] res = new double[N][]; res[0] = points[0]; int count = 1; ClusterDist cd = new ClusterDist(); switch( init ) { case Random: break; case PlusPlus: { // k-means++ while( count < res.length ) { double sum = 0; for (double[] point1 : points) sum += minSqr(res, point1, isCats, cd, count); for (double[] point : points) { if (minSqr(res, point, isCats, cd, count) >= rand.nextDouble() * sum) { res[count++] = point; break; } } } break; } case Furthest: { // Takes cluster center further from any already chosen ones while( count < res.length ) { double max = 0; int index = 0; for( int i = 0; i < points.length; i++ ) { double sqr = minSqr(res, points[i], isCats, cd, count); if( sqr > max ) { max = sqr; index = i; } } res[count++] = points[index]; } break; } default: throw H2O.fail(); } return res; } private void randomRow(Vec[] vecs, Random rand, double[] center, double[] means, double[] mults, int[] modes) { long row = Math.max(0, (long) (rand.nextDouble() * vecs[0].length()) - 1); data(center, vecs, row, means, mults, modes); } // Pick most common cat level for each cluster_centers' cat columns private static double[][] max_cats(double[][] centers, long[][][] cats, String[][] isCats) { for( int clu = 0; clu < centers.length; clu++ ) for( int col = 0; col < centers[0].length; col++ ) if( isCats[col] != null ) centers[clu][col] = ArrayUtils.maxIndex(cats[clu][col]); return centers; } private static double[][] destandardize(double[][] centers, String[][] isCats, double[] means, double[] mults) { int K = centers.length; int N = centers[0].length; double[][] value = new double[K][N]; for( int clu = 0; clu < K; clu++ ) { System.arraycopy(centers[clu],0,value[clu],0,N); if( mults!=null ) { // Reverse standardization for( int col = 0; col < N; col++) if( isCats[col] == null ) value[clu][col] = value[clu][col] / mults[col] + means[col]; } } return value; } private static void data(double[] values, Vec[] vecs, long row, double[] means, double[] mults, int[] modes) { for( int i = 0; i < values.length; i++ ) { values[i] = Kmeans_preprocessData(vecs[i].at(row), i, means, mults, 
modes); } } private static void data(double[] values, Chunk[] chks, int row, double[] means, double[] mults, int[] modes) { for( int i = 0; i < values.length; i++ ) { values[i] = Kmeans_preprocessData(chks[i].atd(row), i, means, mults, modes); } } /** * This helper creates a ModelMetricsClustering from a trained model. * @param model must contain valid statistics from training, such as _betweenss etc. */ private ModelMetricsClustering makeTrainingMetrics(KMeansModel model) { ModelMetricsClustering mm = new ModelMetricsClustering(model, train()); mm._size = model._output._size; mm._withinss = model._output._withinss; mm._betweenss = model._output._betweenss; mm._totss = model._output._totss; mm._tot_withinss = model._output._tot_withinss; model.addMetrics(mm); return mm; } private static class SplitTask extends MRTask<SplitTask> { // IN double[][] _centers; double[] _means, _mults; // Standardization int[] _modes; // Imputation of missing categoricals final int _k; final String[][] _isCats; final boolean _hasWeight; final int _clusterToSplit; final int _dimToSplit; final double _splitPoint; // OUT double[][] _cMeans; // Means for each cluster long[] _size; // Number of rows in each cluster SplitTask(double[][] centers, double[] means, double[] mults, int[] modes, String[][] isCats, int k, boolean hasWeight, int clusterToSplit, int dimToSplit, double splitPoint) { _centers = centers; _means = means; _mults = mults; _modes = modes; _isCats = isCats; _k = k; _hasWeight = hasWeight; _clusterToSplit = clusterToSplit; _dimToSplit = dimToSplit; _splitPoint = splitPoint; } @Override public void map(Chunk[] cs) { int N = cs.length - (_hasWeight ? 1:0) - 1 /*clusterassignment*/; assert _centers[0].length==N; _cMeans = new double[_k][N]; _size = new long[_k]; Chunk assignment = cs[cs.length-1]; // Find closest cluster center for each row double[] values = new double[N]; // Temp data to hold row as doubles ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { if (assignment.at8(row) != _clusterToSplit) continue; double weight = _hasWeight ? cs[N].atd(row) : 1; if (weight == 0) continue; //skip holdout rows assert(weight == 1); //K-Means only works for weight 1 (or weight 0 for holdout) data(values, cs, row, _means, _mults, _modes); // Load row as doubles assert (_isCats[_dimToSplit]==null); if (values[_dimToSplit] > _centers[_clusterToSplit][_dimToSplit]) { cd._cluster = _centers.length-1; assignment.set(row, cd._cluster); } else { cd._cluster = _clusterToSplit; } int clu = cd._cluster; assert clu != -1; // No broken rows // Add values and increment counter for chosen cluster for( int col = 0; col < N; col++ ) _cMeans[clu][col] += values[col]; // Sum the column centers _size[clu]++; } // Scale back down to local mean for( int clu = 0; clu < _k; clu++ ) if( _size[clu] != 0 ) ArrayUtils.div(_cMeans[clu], _size[clu]); _centers = null; _means = _mults = null; _modes = null; } @Override public void reduce(SplitTask mr) { for( int clu = 0; clu < _k; clu++ ) { long ra = _size[clu]; long rb = mr._size[clu]; double[] ma = _cMeans[clu]; double[] mb = mr._cMeans[clu]; for( int c = 0; c < ma.length; c++ ) // Recursive mean if( ra+rb > 0 ) ma[c] = (ma[c] * ra + mb[c] * rb) / (ra + rb); } ArrayUtils.add(_size, mr._size); } } }
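// ------------------------------------------------------------------------
// The PlusPlus branch of recluster() above implements k-means++ seeding: each new
// center should be chosen with probability proportional to a point's squared
// distance from its nearest already-chosen center. The H2O loop uses a simplified
// acceptance test; the sketch below is a minimal standalone version of the
// standard cumulative-sum formulation on plain double[][] data. The class and
// method names here are illustrative only and not part of the H2O API.
class KMeansPlusPlusSeedingSketch {
  private static double sqrDist(double[] a, double[] b) {
    double s = 0;
    for (int i = 0; i < a.length; i++) { double d = a[i] - b[i]; s += d * d; }
    return s;
  }
  // Squared distance from p to the nearest of the first count centers
  private static double minSqr(double[][] centers, int count, double[] p) {
    double best = Double.MAX_VALUE;
    for (int c = 0; c < count; c++) best = Math.min(best, sqrDist(centers[c], p));
    return best;
  }
  static double[][] seed(double[][] points, int k, java.util.Random rand) {
    double[][] res = new double[k][];
    res[0] = points[rand.nextInt(points.length)]; // first center: uniform at random
    int count = 1;
    while (count < k) {
      double sum = 0;
      for (double[] p : points) sum += minSqr(res, count, p);
      double r = rand.nextDouble() * sum, acc = 0;
      for (double[] p : points) { // sample proportionally to min squared distance
        acc += minSqr(res, count, p);
        if (acc >= r) { res[count++] = p; break; }
      }
    }
    return res;
  }
}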
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.01.11 at 02:39:34 PM EST // package schemas.docbook; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElements; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;sequence> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://docbook.org/ns/docbook}title"/> * &lt;element ref="{http://docbook.org/ns/docbook}titleabbrev"/> * &lt;element ref="{http://docbook.org/ns/docbook}subtitle"/> * &lt;/choice> * &lt;element ref="{http://docbook.org/ns/docbook}info" minOccurs="0"/> * &lt;/sequence> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://docbook.org/ns/docbook}itemizedlist"/> * &lt;element ref="{http://docbook.org/ns/docbook}orderedlist"/> * &lt;element ref="{http://docbook.org/ns/docbook}procedure"/> * &lt;element ref="{http://docbook.org/ns/docbook}simplelist"/> * &lt;element ref="{http://docbook.org/ns/docbook}variablelist"/> * &lt;element ref="{http://docbook.org/ns/docbook}segmentedlist"/> * &lt;element ref="{http://docbook.org/ns/docbook}glosslist"/> * &lt;element ref="{http://docbook.org/ns/docbook}bibliolist"/> * &lt;element ref="{http://docbook.org/ns/docbook}calloutlist"/> * &lt;element ref="{http://docbook.org/ns/docbook}qandaset"/> * &lt;element ref="{http://docbook.org/ns/docbook}example"/> * &lt;element ref="{http://docbook.org/ns/docbook}figure"/> * &lt;element ref="{http://docbook.org/ns/docbook}table"/> * &lt;element ref="{http://docbook.org/ns/docbook}equation"/> * &lt;element ref="{http://docbook.org/ns/docbook}informalexample"/> * &lt;element ref="{http://docbook.org/ns/docbook}informalfigure"/> * &lt;element ref="{http://docbook.org/ns/docbook}informaltable"/> * &lt;element ref="{http://docbook.org/ns/docbook}informalequation"/> * &lt;element ref="{http://docbook.org/ns/docbook}sidebar"/> * &lt;element ref="{http://docbook.org/ns/docbook}blockquote"/> * &lt;element ref="{http://docbook.org/ns/docbook}address"/> * &lt;element ref="{http://docbook.org/ns/docbook}epigraph"/> * &lt;element ref="{http://docbook.org/ns/docbook}mediaobject"/> * &lt;element ref="{http://docbook.org/ns/docbook}screenshot"/> * &lt;element ref="{http://docbook.org/ns/docbook}task"/> * &lt;element ref="{http://docbook.org/ns/docbook}productionset"/> * &lt;element ref="{http://docbook.org/ns/docbook}constraintdef"/> * &lt;element ref="{http://docbook.org/ns/docbook}msgset"/> * &lt;element ref="{http://docbook.org/ns/docbook}screen"/> * &lt;element 
ref="{http://docbook.org/ns/docbook}literallayout"/> * &lt;element ref="{http://docbook.org/ns/docbook}programlistingco"/> * &lt;element ref="{http://docbook.org/ns/docbook}screenco"/> * &lt;element ref="{http://docbook.org/ns/docbook}programlisting"/> * &lt;element ref="{http://docbook.org/ns/docbook}synopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}bridgehead"/> * &lt;element ref="{http://docbook.org/ns/docbook}remark"/> * &lt;element ref="{http://docbook.org/ns/docbook}revhistory"/> * &lt;element ref="{http://docbook.org/ns/docbook}indexterm"/> * &lt;element ref="{http://docbook.org/ns/docbook}funcsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}classsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}methodsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}constructorsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}destructorsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}fieldsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}cmdsynopsis"/> * &lt;element ref="{http://docbook.org/ns/docbook}caution"/> * &lt;element ref="{http://docbook.org/ns/docbook}important"/> * &lt;element ref="{http://docbook.org/ns/docbook}note"/> * &lt;element ref="{http://docbook.org/ns/docbook}tip"/> * &lt;element ref="{http://docbook.org/ns/docbook}warning"/> * &lt;element ref="{http://docbook.org/ns/docbook}anchor"/> * &lt;element ref="{http://docbook.org/ns/docbook}para"/> * &lt;element ref="{http://docbook.org/ns/docbook}formalpara"/> * &lt;element ref="{http://docbook.org/ns/docbook}simpara"/> * &lt;element ref="{http://docbook.org/ns/docbook}annotation"/> * &lt;/choice> * &lt;choice> * &lt;element ref="{http://docbook.org/ns/docbook}bibliodiv" maxOccurs="unbounded"/> * &lt;choice maxOccurs="unbounded"> * &lt;element ref="{http://docbook.org/ns/docbook}biblioentry"/> * &lt;element ref="{http://docbook.org/ns/docbook}bibliomixed"/> * &lt;/choice> * &lt;/choice> * &lt;/sequence> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.linking.attributes"/> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/> * &lt;attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="label" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="status" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "titlesAndTitleabbrevsAndSubtitles", "info", "itemizedlistsAndOrderedlistsAndProcedures", "biblioentriesAndBibliomixeds", "bibliodivs" }) @XmlRootElement(name = "bibliography") public class Bibliography { @XmlElements({ @XmlElement(name = "title", type = Title.class), @XmlElement(name = "titleabbrev", type = Titleabbrev.class), @XmlElement(name = "subtitle", type = Subtitle.class) }) protected List<Object> titlesAndTitleabbrevsAndSubtitles; protected Info info; @XmlElements({ @XmlElement(name = "itemizedlist", type = Itemizedlist.class), @XmlElement(name = "orderedlist", type = Orderedlist.class), @XmlElement(name = "procedure", type = Procedure.class), @XmlElement(name = "simplelist", type = Simplelist.class), @XmlElement(name = "variablelist", type = Variablelist.class), @XmlElement(name = "segmentedlist", type = Segmentedlist.class), @XmlElement(name = "glosslist", type = Glosslist.class), @XmlElement(name = "bibliolist", type = Bibliolist.class), @XmlElement(name = "calloutlist", type = 
Calloutlist.class), @XmlElement(name = "qandaset", type = Qandaset.class), @XmlElement(name = "example", type = Example.class), @XmlElement(name = "figure", type = Figure.class), @XmlElement(name = "table", type = Table.class), @XmlElement(name = "equation", type = Equation.class), @XmlElement(name = "informalexample", type = Informalexample.class), @XmlElement(name = "informalfigure", type = Informalfigure.class), @XmlElement(name = "informaltable", type = Informaltable.class), @XmlElement(name = "informalequation", type = Informalequation.class), @XmlElement(name = "sidebar", type = Sidebar.class), @XmlElement(name = "blockquote", type = Blockquote.class), @XmlElement(name = "address", type = Address.class), @XmlElement(name = "epigraph", type = Epigraph.class), @XmlElement(name = "mediaobject", type = Mediaobject.class), @XmlElement(name = "screenshot", type = Screenshot.class), @XmlElement(name = "task", type = Task.class), @XmlElement(name = "productionset", type = Productionset.class), @XmlElement(name = "constraintdef", type = Constraintdef.class), @XmlElement(name = "msgset", type = Msgset.class), @XmlElement(name = "screen", type = Screen.class), @XmlElement(name = "literallayout", type = Literallayout.class), @XmlElement(name = "programlistingco", type = Programlistingco.class), @XmlElement(name = "screenco", type = Screenco.class), @XmlElement(name = "programlisting", type = Programlisting.class), @XmlElement(name = "synopsis", type = Synopsis.class), @XmlElement(name = "bridgehead", type = Bridgehead.class), @XmlElement(name = "remark", type = Remark.class), @XmlElement(name = "revhistory", type = Revhistory.class), @XmlElement(name = "indexterm", type = Indexterm.class), @XmlElement(name = "funcsynopsis", type = Funcsynopsis.class), @XmlElement(name = "classsynopsis", type = Classsynopsis.class), @XmlElement(name = "methodsynopsis", type = Methodsynopsis.class), @XmlElement(name = "constructorsynopsis", type = Constructorsynopsis.class), @XmlElement(name = "destructorsynopsis", type = Destructorsynopsis.class), @XmlElement(name = "fieldsynopsis", type = Fieldsynopsis.class), @XmlElement(name = "cmdsynopsis", type = Cmdsynopsis.class), @XmlElement(name = "caution", type = Caution.class), @XmlElement(name = "important", type = Important.class), @XmlElement(name = "note", type = Note.class), @XmlElement(name = "tip", type = Tip.class), @XmlElement(name = "warning", type = Warning.class), @XmlElement(name = "anchor", type = Anchor.class), @XmlElement(name = "para", type = Para.class), @XmlElement(name = "formalpara", type = Formalpara.class), @XmlElement(name = "simpara", type = Simpara.class), @XmlElement(name = "annotation", type = Annotation.class) }) protected List<Object> itemizedlistsAndOrderedlistsAndProcedures; @XmlElements({ @XmlElement(name = "biblioentry", type = Biblioentry.class), @XmlElement(name = "bibliomixed", type = Bibliomixed.class) }) protected List<Object> biblioentriesAndBibliomixeds; @XmlElement(name = "bibliodiv") protected List<Bibliodiv> bibliodivs; @XmlAttribute(name = "role") @XmlSchemaType(name = "anySimpleType") protected String role; @XmlAttribute(name = "label") @XmlSchemaType(name = "anySimpleType") protected String label; @XmlAttribute(name = "status") @XmlSchemaType(name = "anySimpleType") protected String status; @XmlAttribute(name = "linkend") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object linkend; @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; 
@XmlAttribute(name = "type", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkType; @XmlAttribute(name = "role", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkRole; @XmlAttribute(name = "arcrole", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String arcrole; @XmlAttribute(name = "title", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkTitle; @XmlAttribute(name = "show", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String show; @XmlAttribute(name = "actuate", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String actuate; @XmlAttribute(name = "id", namespace = "http://www.w3.org/XML/1998/namespace") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "version") @XmlSchemaType(name = "anySimpleType") protected String commonVersion; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String xmlLang; @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String base; @XmlAttribute(name = "remap") @XmlSchemaType(name = "anySimpleType") protected String remap; @XmlAttribute(name = "xreflabel") @XmlSchemaType(name = "anySimpleType") protected String xreflabel; @XmlAttribute(name = "revisionflag") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String revisionflag; @XmlAttribute(name = "dir") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute(name = "arch") @XmlSchemaType(name = "anySimpleType") protected String arch; @XmlAttribute(name = "audience") @XmlSchemaType(name = "anySimpleType") protected String audience; @XmlAttribute(name = "condition") @XmlSchemaType(name = "anySimpleType") protected String condition; @XmlAttribute(name = "conformance") @XmlSchemaType(name = "anySimpleType") protected String conformance; @XmlAttribute(name = "os") @XmlSchemaType(name = "anySimpleType") protected String os; @XmlAttribute(name = "revision") @XmlSchemaType(name = "anySimpleType") protected String commonRevision; @XmlAttribute(name = "security") @XmlSchemaType(name = "anySimpleType") protected String security; @XmlAttribute(name = "userlevel") @XmlSchemaType(name = "anySimpleType") protected String userlevel; @XmlAttribute(name = "vendor") @XmlSchemaType(name = "anySimpleType") protected String vendor; @XmlAttribute(name = "wordsize") @XmlSchemaType(name = "anySimpleType") protected String wordsize; @XmlAttribute(name = "annotations") @XmlSchemaType(name = "anySimpleType") protected String annotations; /** * Gets the value of the titlesAndTitleabbrevsAndSubtitles property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the titlesAndTitleabbrevsAndSubtitles property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getTitlesAndTitleabbrevsAndSubtitles().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Title } * {@link Titleabbrev } * {@link Subtitle } * * */ public List<Object> getTitlesAndTitleabbrevsAndSubtitles() { if (titlesAndTitleabbrevsAndSubtitles == null) { titlesAndTitleabbrevsAndSubtitles = new ArrayList<Object>(); } return this.titlesAndTitleabbrevsAndSubtitles; } /** * Gets the value of the info property. * * @return * possible object is * {@link Info } * */ public Info getInfo() { return info; } /** * Sets the value of the info property. * * @param value * allowed object is * {@link Info } * */ public void setInfo(Info value) { this.info = value; } /** * Gets the value of the itemizedlistsAndOrderedlistsAndProcedures property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the itemizedlistsAndOrderedlistsAndProcedures property. * * <p> * For example, to add a new item, do as follows: * <pre> * getItemizedlistsAndOrderedlistsAndProcedures().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Itemizedlist } * {@link Orderedlist } * {@link Procedure } * {@link Simplelist } * {@link Variablelist } * {@link Segmentedlist } * {@link Glosslist } * {@link Bibliolist } * {@link Calloutlist } * {@link Qandaset } * {@link Example } * {@link Figure } * {@link Table } * {@link Equation } * {@link Informalexample } * {@link Informalfigure } * {@link Informaltable } * {@link Informalequation } * {@link Sidebar } * {@link Blockquote } * {@link Address } * {@link Epigraph } * {@link Mediaobject } * {@link Screenshot } * {@link Task } * {@link Productionset } * {@link Constraintdef } * {@link Msgset } * {@link Screen } * {@link Literallayout } * {@link Programlistingco } * {@link Screenco } * {@link Programlisting } * {@link Synopsis } * {@link Bridgehead } * {@link Remark } * {@link Revhistory } * {@link Indexterm } * {@link Funcsynopsis } * {@link Classsynopsis } * {@link Methodsynopsis } * {@link Constructorsynopsis } * {@link Destructorsynopsis } * {@link Fieldsynopsis } * {@link Cmdsynopsis } * {@link Caution } * {@link Important } * {@link Note } * {@link Tip } * {@link Warning } * {@link Anchor } * {@link Para } * {@link Formalpara } * {@link Simpara } * {@link Annotation } * * */ public List<Object> getItemizedlistsAndOrderedlistsAndProcedures() { if (itemizedlistsAndOrderedlistsAndProcedures == null) { itemizedlistsAndOrderedlistsAndProcedures = new ArrayList<Object>(); } return this.itemizedlistsAndOrderedlistsAndProcedures; } /** * Gets the value of the biblioentriesAndBibliomixeds property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the biblioentriesAndBibliomixeds property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getBiblioentriesAndBibliomixeds().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Biblioentry } * {@link Bibliomixed } * * */ public List<Object> getBiblioentriesAndBibliomixeds() { if (biblioentriesAndBibliomixeds == null) { biblioentriesAndBibliomixeds = new ArrayList<Object>(); } return this.biblioentriesAndBibliomixeds; } /** * Gets the value of the bibliodivs property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the bibliodivs property. * * <p> * For example, to add a new item, do as follows: * <pre> * getBibliodivs().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Bibliodiv } * * */ public List<Bibliodiv> getBibliodivs() { if (bibliodivs == null) { bibliodivs = new ArrayList<Bibliodiv>(); } return this.bibliodivs; } /** * Gets the value of the role property. * * @return * possible object is * {@link String } * */ public String getRole() { return role; } /** * Sets the value of the role property. * * @param value * allowed object is * {@link String } * */ public void setRole(String value) { this.role = value; } /** * Gets the value of the label property. * * @return * possible object is * {@link String } * */ public String getLabel() { return label; } /** * Sets the value of the label property. * * @param value * allowed object is * {@link String } * */ public void setLabel(String value) { this.label = value; } /** * Gets the value of the status property. * * @return * possible object is * {@link String } * */ public String getStatus() { return status; } /** * Sets the value of the status property. * * @param value * allowed object is * {@link String } * */ public void setStatus(String value) { this.status = value; } /** * Gets the value of the linkend property. * * @return * possible object is * {@link Object } * */ public Object getLinkend() { return linkend; } /** * Sets the value of the linkend property. * * @param value * allowed object is * {@link Object } * */ public void setLinkend(Object value) { this.linkend = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the xlinkType property. * * @return * possible object is * {@link String } * */ public String getXlinkType() { return xlinkType; } /** * Sets the value of the xlinkType property. * * @param value * allowed object is * {@link String } * */ public void setXlinkType(String value) { this.xlinkType = value; } /** * Gets the value of the xlinkRole property. * * @return * possible object is * {@link String } * */ public String getXlinkRole() { return xlinkRole; } /** * Sets the value of the xlinkRole property. * * @param value * allowed object is * {@link String } * */ public void setXlinkRole(String value) { this.xlinkRole = value; } /** * Gets the value of the arcrole property. * * @return * possible object is * {@link String } * */ public String getArcrole() { return arcrole; } /** * Sets the value of the arcrole property. 
* * @param value * allowed object is * {@link String } * */ public void setArcrole(String value) { this.arcrole = value; } /** * Gets the value of the xlinkTitle property. * * @return * possible object is * {@link String } * */ public String getXlinkTitle() { return xlinkTitle; } /** * Sets the value of the xlinkTitle property. * * @param value * allowed object is * {@link String } * */ public void setXlinkTitle(String value) { this.xlinkTitle = value; } /** * Gets the value of the show property. * * @return * possible object is * {@link String } * */ public String getShow() { return show; } /** * Sets the value of the show property. * * @param value * allowed object is * {@link String } * */ public void setShow(String value) { this.show = value; } /** * Gets the value of the actuate property. * * @return * possible object is * {@link String } * */ public String getActuate() { return actuate; } /** * Sets the value of the actuate property. * * @param value * allowed object is * {@link String } * */ public void setActuate(String value) { this.actuate = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the commonVersion property. * * @return * possible object is * {@link String } * */ public String getCommonVersion() { return commonVersion; } /** * Sets the value of the commonVersion property. * * @param value * allowed object is * {@link String } * */ public void setCommonVersion(String value) { this.commonVersion = value; } /** * Gets the value of the xmlLang property. * * @return * possible object is * {@link String } * */ public String getXmlLang() { return xmlLang; } /** * Sets the value of the xmlLang property. * * @param value * allowed object is * {@link String } * */ public void setXmlLang(String value) { this.xmlLang = value; } /** * Gets the value of the base property. * * @return * possible object is * {@link String } * */ public String getBase() { return base; } /** * Sets the value of the base property. * * @param value * allowed object is * {@link String } * */ public void setBase(String value) { this.base = value; } /** * Gets the value of the remap property. * * @return * possible object is * {@link String } * */ public String getRemap() { return remap; } /** * Sets the value of the remap property. * * @param value * allowed object is * {@link String } * */ public void setRemap(String value) { this.remap = value; } /** * Gets the value of the xreflabel property. * * @return * possible object is * {@link String } * */ public String getXreflabel() { return xreflabel; } /** * Sets the value of the xreflabel property. * * @param value * allowed object is * {@link String } * */ public void setXreflabel(String value) { this.xreflabel = value; } /** * Gets the value of the revisionflag property. * * @return * possible object is * {@link String } * */ public String getRevisionflag() { return revisionflag; } /** * Sets the value of the revisionflag property. * * @param value * allowed object is * {@link String } * */ public void setRevisionflag(String value) { this.revisionflag = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. 
* * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the arch property. * * @return * possible object is * {@link String } * */ public String getArch() { return arch; } /** * Sets the value of the arch property. * * @param value * allowed object is * {@link String } * */ public void setArch(String value) { this.arch = value; } /** * Gets the value of the audience property. * * @return * possible object is * {@link String } * */ public String getAudience() { return audience; } /** * Sets the value of the audience property. * * @param value * allowed object is * {@link String } * */ public void setAudience(String value) { this.audience = value; } /** * Gets the value of the condition property. * * @return * possible object is * {@link String } * */ public String getCondition() { return condition; } /** * Sets the value of the condition property. * * @param value * allowed object is * {@link String } * */ public void setCondition(String value) { this.condition = value; } /** * Gets the value of the conformance property. * * @return * possible object is * {@link String } * */ public String getConformance() { return conformance; } /** * Sets the value of the conformance property. * * @param value * allowed object is * {@link String } * */ public void setConformance(String value) { this.conformance = value; } /** * Gets the value of the os property. * * @return * possible object is * {@link String } * */ public String getOs() { return os; } /** * Sets the value of the os property. * * @param value * allowed object is * {@link String } * */ public void setOs(String value) { this.os = value; } /** * Gets the value of the commonRevision property. * * @return * possible object is * {@link String } * */ public String getCommonRevision() { return commonRevision; } /** * Sets the value of the commonRevision property. * * @param value * allowed object is * {@link String } * */ public void setCommonRevision(String value) { this.commonRevision = value; } /** * Gets the value of the security property. * * @return * possible object is * {@link String } * */ public String getSecurity() { return security; } /** * Sets the value of the security property. * * @param value * allowed object is * {@link String } * */ public void setSecurity(String value) { this.security = value; } /** * Gets the value of the userlevel property. * * @return * possible object is * {@link String } * */ public String getUserlevel() { return userlevel; } /** * Sets the value of the userlevel property. * * @param value * allowed object is * {@link String } * */ public void setUserlevel(String value) { this.userlevel = value; } /** * Gets the value of the vendor property. * * @return * possible object is * {@link String } * */ public String getVendor() { return vendor; } /** * Sets the value of the vendor property. * * @param value * allowed object is * {@link String } * */ public void setVendor(String value) { this.vendor = value; } /** * Gets the value of the wordsize property. * * @return * possible object is * {@link String } * */ public String getWordsize() { return wordsize; } /** * Sets the value of the wordsize property. * * @param value * allowed object is * {@link String } * */ public void setWordsize(String value) { this.wordsize = value; } /** * Gets the value of the annotations property. 
* * @return * possible object is * {@link String } * */ public String getAnnotations() { return annotations; } /** * Sets the value of the annotations property. * * @param value * allowed object is * {@link String } * */ public void setAnnotations(String value) { this.annotations = value; } }
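// ------------------------------------------------------------------------
// The accessors above return live lists, so content is added by mutating the
// returned list rather than through setters, as the generated javadoc notes.
// A minimal usage sketch; it assumes only sibling classes generated from the
// same schema, such as Title and Bibliomixed, which are referenced in the
// @XmlElements annotations above.
class BibliographyUsageSketch {
    public static void main(String[] args) {
        Bibliography bib = new Bibliography();
        bib.setRole("references");
        // Live lists: add() mutates the JAXB-backed list in place; there is no setter.
        bib.getTitlesAndTitleabbrevsAndSubtitles().add(new Title());
        bib.getBiblioentriesAndBibliomixeds().add(new Bibliomixed());
    }
}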
/* * Copyright 2014 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkArgument; import static com.google.javascript.jscomp.Es6ToEs3Util.createType; import static com.google.javascript.jscomp.Es6ToEs3Util.withType; import com.google.common.collect.Lists; import com.google.javascript.jscomp.parsing.parser.FeatureSet; import com.google.javascript.jscomp.parsing.parser.FeatureSet.Feature; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.jstype.JSType; import com.google.javascript.rhino.jstype.JSTypeNative; import com.google.javascript.rhino.jstype.JSTypeRegistry; import java.util.ArrayList; import java.util.List; /** * Converts ES6 code to valid ES5 code. This class does most of the transpilation, and * https://github.com/google/closure-compiler/wiki/ECMAScript6 lists which ES6 features are * supported. Other classes that start with "Es6" do other parts of the transpilation. * * <p>In most cases, the output is valid as ES3 (hence the class name) but in some cases, if * the output language is set to ES5, we rely on ES5 features such as getters, setters, * and Object.defineProperties. * * @author [email protected] (Tyler Breisacher) */ // TODO(tbreisacher): This class does too many things. Break it into smaller passes. public final class LateEs6ToEs3Converter implements NodeTraversal.Callback, HotSwapCompilerPass { private final AbstractCompiler compiler; private static final FeatureSet transpiledFeatures = FeatureSet.BARE_MINIMUM.with( Feature.COMPUTED_PROPERTIES, Feature.MEMBER_DECLARATIONS, Feature.TEMPLATE_LITERALS); // addTypes indicates whether we should add type information when transpiling. private final boolean addTypes; private final JSTypeRegistry registry; private final JSType unknownType; private final JSType stringType; private static final String FRESH_COMP_PROP_VAR = "$jscomp$compprop"; public LateEs6ToEs3Converter(AbstractCompiler compiler) { this.compiler = compiler; // Only add type information if NTI has been run. 
this.addTypes = compiler.hasTypeCheckingRun(); this.registry = compiler.getTypeRegistry(); this.unknownType = createType(addTypes, registry, JSTypeNative.UNKNOWN_TYPE); this.stringType = createType(addTypes, registry, JSTypeNative.STRING_TYPE); } @Override public void process(Node externs, Node root) { TranspilationPasses.processTranspile(compiler, externs, transpiledFeatures, this); TranspilationPasses.processTranspile(compiler, root, transpiledFeatures, this); TranspilationPasses.maybeMarkFeaturesAsTranspiledAway(compiler, transpiledFeatures); } @Override public void hotSwapScript(Node scriptRoot, Node originalRoot) { TranspilationPasses.hotSwapTranspile(compiler, scriptRoot, transpiledFeatures, this); TranspilationPasses.maybeMarkFeaturesAsTranspiledAway(compiler, transpiledFeatures); } @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { switch (n.getToken()) { case GETTER_DEF: case SETTER_DEF: if (FeatureSet.ES3.contains(compiler.getOptions().getOutputFeatureSet())) { Es6ToEs3Util.cannotConvert( compiler, n, "ES5 getters/setters (consider using --language_out=ES5)"); return false; } break; case FUNCTION: if (n.isAsyncFunction()) { throw new IllegalStateException("async functions should have already been converted"); } break; default: break; } return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { switch (n.getToken()) { case OBJECTLIT: visitObject(n); break; case MEMBER_FUNCTION_DEF: if (parent.isObjectLit()) { visitMemberFunctionDefInObjectLit(n, parent); } break; case TAGGED_TEMPLATELIT: Es6TemplateLiterals.visitTaggedTemplateLiteral(t, n, addTypes); break; case TEMPLATELIT: if (!parent.isTaggedTemplateLit()) { Es6TemplateLiterals.visitTemplateLiteral(t, n, addTypes); } break; default: break; } } /** * Converts a member definition in an object literal to an ES3 key/value pair. * Member definitions in classes are handled in {@link Es6RewriteClass}. */ private void visitMemberFunctionDefInObjectLit(Node n, Node parent) { String name = n.getString(); Node nameNode = n.getFirstFirstChild(); Node stringKey = withType(IR.stringKey(name, n.getFirstChild().detach()), n.getJSType()); stringKey.setJSDocInfo(n.getJSDocInfo()); parent.replaceChild(n, stringKey); stringKey.useSourceInfoFrom(nameNode); compiler.reportChangeToEnclosingScope(stringKey); } private void visitObject(Node obj) { for (Node child : obj.children()) { if (child.isComputedProp()) { visitObjectWithComputedProperty(obj); return; } } } /** * Transpiles an object node with a computed property, * and adds type information to the new nodes if this pass ran after type checking. * For example,<pre> {@code * var obj = {a: 1, [i++]: 2} * is transpiled to * var $jscomp$compprop0 = {}; * var obj = ($jscomp$compprop0.a = 1, ($jscomp$compprop0[i++] = 2, $jscomp$compprop0)); * }</pre> * Note that when adding type information to the nodes, the NAME node $jscomp$compprop0 * would always be assigned the type of the entire object (in the above example {a: number}).
* This is because we do not have sufficient type information during transpilation to know, * for example, $jscomp$compprop0 has type Object{} in the expression $jscomp$compprop0.a = 1 */ private void visitObjectWithComputedProperty(Node obj) { checkArgument(obj.isObjectLit()); List<Node> props = new ArrayList<>(); Node currElement = obj.getFirstChild(); JSType objectType = obj.getJSType(); while (currElement != null) { if (currElement.getBooleanProp(Node.COMPUTED_PROP_GETTER) || currElement.getBooleanProp(Node.COMPUTED_PROP_SETTER)) { Es6ToEs3Util.cannotConvertYet( compiler, currElement, "computed getter/setter in an object literal"); return; } else if (currElement.isGetterDef() || currElement.isSetterDef()) { currElement = currElement.getNext(); } else { Node nextNode = currElement.getNext(); obj.removeChild(currElement); props.add(currElement); currElement = nextNode; } } String objName = FRESH_COMP_PROP_VAR + compiler.getUniqueNameIdSupplier().get(); props = Lists.reverse(props); Node result = withType(IR.name(objName), objectType); for (Node propdef : props) { if (propdef.isComputedProp()) { Node propertyExpression = propdef.removeFirstChild(); Node value = propdef.removeFirstChild(); JSType valueType = value.getJSType(); result = withType( IR.comma( withType( IR.assign( withUnknownType( IR.getelem( withType(IR.name(objName), objectType), propertyExpression)), value), valueType), result), objectType); } else { Node val = propdef.removeFirstChild(); JSType valueType = val.getJSType(); Token token = propdef.isQuotedString() ? Token.GETELEM : Token.GETPROP; propdef.setToken(Token.STRING); propdef.setJSType(stringType); propdef.putBooleanProp(Node.QUOTED_PROP, false); Node access = withType(new Node(token, withType(IR.name(objName), objectType), propdef), valueType); result = withType(IR.comma(withType(IR.assign(access, val), valueType), result), objectType); } } Node statement = obj; while (!NodeUtil.isStatement(statement)) { statement = statement.getParent(); } result.useSourceInfoIfMissingFromForTree(obj); obj.replaceWith(result); JSType simpleObjectType = null; Node var = IR.var(withType(IR.name(objName), objectType), withType(obj, simpleObjectType)); var.useSourceInfoIfMissingFromForTree(statement); statement.getParent().addChildBefore(var, statement); compiler.reportChangeToEnclosingScope(var); } private Node withUnknownType(Node n) { return withType(n, unknownType); } }
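// ------------------------------------------------------------------------
// For reference, visitObjectWithComputedProperty() above emits GETPROP for plain
// keys and GETELEM for quoted keys. Assuming an illustrative input (the temp
// name's numeric suffix is assigned by getUniqueNameIdSupplier()), code such as
//
//   var obj = {a: 1, [i++]: 2, "b c": 3};
//
// is rewritten to
//
//   var $jscomp$compprop0 = {};
//   var obj = ($jscomp$compprop0.a = 1,
//       ($jscomp$compprop0[i++] = 2,
//           ($jscomp$compprop0["b c"] = 3, $jscomp$compprop0)));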
/* Copyright 1996-2010 Ariba, Inc. All rights reserved. Patents pending. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. $Id: //ariba/platform/util/core/ariba/util/io/CSVReader.java#27 $ */ package ariba.util.io; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.LineNumberReader; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLConnection; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.MalformedInputException; import java.util.List; import ariba.util.core.Assert; import ariba.util.core.FastStringBuffer; import ariba.util.core.Fmt; import ariba.util.core.IOUtil; import ariba.util.core.ListUtil; import ariba.util.core.SystemUtil; import ariba.util.log.Log; /** CSVReader parses files in CSV (comma separated values) format. Callers supply a CSVConsumer whose consumeLineOfTokens method is invoked for each parsed line. @aribaapi documented */ public final class CSVReader { public static final int ErrorMissingComma = 1; public static final int ErrorUnbalancedQuotes = 2; public static final int ErrorIllegalCharacterOrByteSequence = 3; //represents All Lines public static final int AllLines = -1; private static final char DoubleQuote = '"'; private static final char Comma = ','; private static final char CR = '\r'; private static final char LF = '\n'; private static final char Space = ' '; private static final char Tab = '\t'; private static final int StateFirstLine = 1; private static final int StateBeginningOfLine = 2; private static final int StateBeginningOfField = 3; private static final int StateEndOfField = 4; private static final int StateInUnquotedField = 5; private static final int StateInQuotedField = 6; private static final int StateEOF = 7; static final int TokenBufferSize = 8192; private CSVConsumer csvConsumer; private CSVErrorHandler csvErrorHandler; private String encoding; private boolean encodingIsExplicitlySet = false; private boolean returnNoValueAsNull = false; //Max number of lines to consider while reading. -1 implies all. private int numberOfLines = AllLines; /** Create a new CSVReader using a specific CSVConsumer to handle the rows. After reading has been performed, the CSVReader can be asked what encoding was used. @param csvConsumer CSVConsumer for handling rows @aribaapi documented */ public CSVReader (CSVConsumer csvConsumer) { this(csvConsumer, null); } /** Create a new CSVReader using a specific CSVConsumer to handle the rows and a specified CSVErrorHandler to handle the errors. After reading has been performed, the CSVReader can be asked what encoding was used.
@param csvConsumer CSVConsumer for handling rows @param csvErrorHandler CSVErrorHandler for handling CSV format errors; if null, CSVDefaultErrorHandler will be used @aribaapi ariba */ public CSVReader (CSVConsumer csvConsumer, CSVErrorHandler csvErrorHandler) { this.csvConsumer = csvConsumer; if (csvErrorHandler == null) { this.csvErrorHandler = new CSVDefaultErrorHandler(); } else { this.csvErrorHandler = csvErrorHandler; } } /** This private constructor is needed by the static method readAllLines in order to initialize the inner class CSVConsumerHelper. */ private CSVReader () { } /** Return the encoding used for the last read operation. May be null if a Reader was passed in, in which case we never know the underlying encoding, if any. @return encoding name @aribaapi documented */ public String getEncoding () { return encoding; } /** Returns true if the file had the encoding as the first line in the file, e.g. 8859_1 or 8859_1,, @return true if encoding is set at the beginning of the file @aribaapi documented */ public boolean isEncodingExplicitlySet () { return encodingIsExplicitlySet; } /** Sets the number of lines to consider while reading. @param n number of lines @aribaapi private */ public void setNumberOfLines (int n) { Assert.that(n >= AllLines, "Number of lines %s is not valid.", n); numberOfLines = n; } /** * Alters the behavior of the CSV Reader when parsing an empty value * (i.e. an <i>unquoted</i> empty or blank string). * * When set to <code>true</code> empty values are returned as null, * while when set to <code>false</code> they are returned as empty Strings. * The default behavior is to return empty Strings. * * @param value the new value for the toggling attribute * @aribaapi documented */ public void setReturnEmptyValueAsNull (boolean value) { returnNoValueAsNull = value; } /** Reads the specified URL, using the encoding reported by the URLConnection, or the supplied default encoding if none is reported. @param url the URL to read the data from @param defaultEncoding the encoding to use to read the data if none can be determined from the URLConnection @exception IOException any IOException reading from the URL @aribaapi documented */ public void read (URL url, String defaultEncoding) throws IOException { URLConnection urlConnection = url.openConnection(); encoding = urlConnection.getContentEncoding(); if (encoding == null) { encoding = defaultEncoding; } Assert.that(encoding != null, "null encoding for %s", url); Reader in = IOUtil.bufferedReader(urlConnection.getInputStream(), encoding); read(in, url.toString()); in.close(); } /** Reads the specified file, using the given character encoding. @param file a path to the file to read @param encoding the encoding to use to read the data @exception IOException any IOException reading from the file @aribaapi documented */ public void read (File file, String encoding) throws IOException { Reader in = IOUtil.bufferedReader(file, encoding); read(in, file.getCanonicalPath()); in.close(); } /** Will read from the specified stream in the encoding specified on the first line of the stream. For instance, the first line of the file may be "8859_1", or may be "8859_1,,,". The "8859_1" will be passed into read(inputStream, encoding, path) as the encoding.
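For example, a file whose first line is <code>8859_1,,</code> and whose remaining lines are CSV data will have its data decoded as ISO 8859-1.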
@param url the URL to read the data from @exception IOException any IOException reading from the URL @aribaapi documented */ public void readForSpecifiedEncoding (URL url) throws IOException { InputStream in = url.openStream(); try { readForSpecifiedEncoding(in, url.toString()); } finally { in.close(); } } /** Will read from the specified stream in the encoding specified on the first line of the stream. For instance, the first line of the file may be "8859_1", or may be "8859_1,,,". The "8859_1" will be passed into read(inputStream, encoding, path) as the encoding. @param file the path to the file to read @exception IOException if an IOException occurs while reading the file @see #read(Reader, String) @aribaapi documented */ public final void readForSpecifiedEncoding (File file) throws IOException { readForSpecifiedEncoding(file, null); } /** Will read from the specified stream in the encoding specified on the first line of the stream. For instance, the first line of the file may be "8859_1", or may be "8859_1,,,". The "8859_1" will be passed into read(inputStream, encoding, path) as the encoding. @param file the path to the file to read @param altEncoding alternate encoding to be used if not specified @exception IOException if an IOException occurs while reading the file @see #read(Reader, String) @aribaapi documented */ public final void readForSpecifiedEncoding (File file, String altEncoding) throws IOException { BufferedInputStream in = IOUtil.bufferedInputStream(file); readForSpecifiedEncoding(in, file.getCanonicalPath(), altEncoding); in.close(); } /** Will read from the specified stream in the encoding specified on the first line of the stream. For instance, the first line of the file may be "8859_1", or may be "8859_1,,,". The "8859_1" will be passed into read(inputStream, encoding, path) as the encoding. If there is no encoding line, this function should handle it gracefully. @param inputStream the InputStream to read the data from @param location the path to the data source for debugging messages @exception IOException if an IOException occurs while reading the file @see #read(Reader, String) @aribaapi documented */ public final void readForSpecifiedEncoding (InputStream inputStream, String location) throws IOException { readForSpecifiedEncoding(inputStream, location, null); } /** Will read from the specified stream in the encoding specified on the first line of the stream. For instance, the first line of the file may be "8859_1", or may be "8859_1,,,". The "8859_1" will be passed into read(inputStream, encoding, path) as the encoding. If there is no encoding line, use the altEncoding if available. Otherwise, use the system default encoding. 
@param inputStream the InputStream to read the data from @param location the path to the data source for debugging messages @param altEncoding alternate encoding to be used if not specified @exception IOException if an IOException occurs while reading the file @see #read(Reader, String) @aribaapi documented */ public final void readForSpecifiedEncoding (InputStream inputStream, String location, String altEncoding) throws IOException { if (!inputStream.markSupported()) { inputStream = new BufferedInputStream(inputStream); } inputStream.mark(2048); // extra buffering to make sure the reading of the first // line doesn't go past the length marked byte buf[] = new byte[2048]; int len = inputStream.read(buf, 0, 2048); inputStream.reset(); ByteArrayInputStream bais = new ByteArrayInputStream(buf,0, len); //check for unicode marker ByteBuffer bb = java.nio.ByteBuffer.wrap(buf); CharBuffer cb = bb.asCharBuffer(); char marker = cb.get(0); if (marker == 0xfffe || marker == 0xfeff) { Reader reader = IOUtil.bufferedReader(bais, "UTF-16"); LineNumberReader line = new LineNumberReader(reader); encoding = line.readLine(); } else { encoding = IOUtil.readLine(bais); } if (encoding == null) { //The file is completely empty return; } encoding = extractEncoding(encoding); try { // test the encoding new String(new byte[0], encoding); encodingIsExplicitlySet = true; // pull the first line (encoding) back out of the // buffer IOUtil.readLine(inputStream); } catch (UnsupportedEncodingException uee) { encodingIsExplicitlySet = false; if (altEncoding != null) { encoding = altEncoding; } else { encoding = IOUtil.getDefaultSystemEncoding(); } } catch (RuntimeException e) { /* We actually want to catch java.nio.charset.IllegalCharsetNameException, but Tibco code has to run on JDK 1.3 so we can't catch it yet. Defect 124155 has been filed to track this. */ Log.util.debug("Runtime exception caught " + "while parsing encoding for a CSV file. " + "Encoding read is '%s'.", encoding); encodingIsExplicitlySet = false; if (altEncoding != null) { encoding = altEncoding; } else { encoding = IOUtil.getDefaultSystemEncoding(); } } read(IOUtil.bufferedReader(inputStream, encoding), location); } /** Extract the encoding from the given string. The encoding may have trailing commas, or it may be double quoted. The double quotes are stripped from the encoding string. <p> Example: 8859_1 will be returned in all the following cases: (i) 8859_1,, (ii) "8859_1",, (iii) 8859_1 (iv) "8859_1". <p> Implementation details: to make this method efficient, an assumption is made that any valid encoding will not contain a comma. An input string "8859,_1" will result in the string "8859 (with the leading quote) returned. Strictly speaking, we should return 8859,_1, but note that neither of these returned forms can be a valid encoding. The caller of this method will test the returned String to make sure it is a valid (and supported) encoding. So we just save the cpu cycles of parsing this correctly (they will both end up wrong anyway). To be really correct, we would need to take care of escaped double quotes as well... It's just not worth the effort. <p> @param encoding the raw encoding String from the CSV file, must not be null. @return the encoding String. */ private String extractEncoding (String encoding) { /** finds the indices of the first and last non white spaces (if any) from the input string in the range [0..x), where x is the index of the Comma (if present) or the length of the string. Returns the chars in between.
*/ int loLimit = firstNonWhiteSpaceIndex(encoding); if (loLimit == encoding.length()) { // not much we can do. This is either an empty String or // all characters are spaces. Note that this would be // an invalid encoding, which will be handled by // readForSpecifiedEncoding return encoding; } int hiLimit = encoding.indexOf(Comma); if (hiLimit == -1) { hiLimit = encoding.length(); } hiLimit = lastNonWhiteSpaceIndexPlusOne(encoding, loLimit, hiLimit); if (encoding.charAt(loLimit) != DoubleQuote || encoding.charAt(hiLimit-1) != DoubleQuote) { return (loLimit == 0 && hiLimit == encoding.length()) ? encoding : encoding.substring(loLimit, hiLimit); } return encoding.substring(loLimit+1, hiLimit-1); } /** Returns the index of the first non white space character (actually the first character that is not an ASCII control character) of the given string. @param str the specified string, must not be null. @return the index of the first non white space character (actually the first character that is not an ASCII control character) of the given string. Note that a returned value equal to str.length() means that all characters in the string are ASCII control characters. */ private int firstNonWhiteSpaceIndex (String str) { int st = 0; int len = str.length(); while ((st < len) && (str.charAt(st) <= Space)) { st++; } return st; } /** Returns one past the index of the last non white space character (actually non ASCII control character) of the given string. @param str the given string, must not be null. @param start the index to start searching. @param onePastEnd the index to stop searching. @return one past the index of the last non white space character (actually non ASCII control character) of the given string. Note that a returned value of start means that all the characters of the given string are ASCII control characters. */ private int lastNonWhiteSpaceIndexPlusOne (String str, int start, int onePastEnd) { int index = onePastEnd; while (start < index && str.charAt(index-1) <= Space) { index--; } return index; } /** The primitive read entry point. This method calls the CSVConsumer callback for each line read. @param reader the data source as passed in by one of the higher level read methods @param location the path for debugging messages @exception IOException if an IOException occurs while reading from <b>reader</b> @see ariba.util.io.CSVConsumer#consumeLineOfTokens @aribaapi documented */ public final void read (Reader reader, String location) throws IOException { // this is the number of the line currently being processed int lineNumber = 1; // this is the line number passed to the consumeLineOfTokens method.
int lineNumberToConsume = lineNumber; int ch = 0; int state = StateFirstLine; List tokens = ListUtil.list(); FastStringBuffer token = new FastStringBuffer(TokenBufferSize); boolean currentTokenIsQuoted = false; if (!(reader instanceof BufferedReader)) { reader = new BufferedReader(reader); } reader.mark(1); try { if (reader.read() == -1) { return; } } catch (MalformedInputException e) { csvErrorHandler.handleError( ErrorIllegalCharacterOrByteSequence, location, lineNumber); throw e; } reader.reset(); try { while (state != StateEOF) { switch (state) { case StateFirstLine: { ch = reader.read(); state = StateBeginningOfField; break; } case StateBeginningOfLine: { if (canConsume(lineNumberToConsume)) { csvConsumer.consumeLineOfTokens(location, lineNumberToConsume, tokens); } else { state = StateEOF; break; } tokens = ListUtil.list(); state = StateBeginningOfField; break; } case StateBeginningOfField: { while ((ch == Space) || (ch == Tab)) { ch = reader.read(); } if (ch == DoubleQuote) { state = StateInQuotedField; currentTokenIsQuoted = true; ch = reader.read(); } else { state = StateInUnquotedField; currentTokenIsQuoted = false; } break; } case StateEndOfField: { String currentToken = token.toString(); // only trim if the value is not double-quoted if (!currentTokenIsQuoted) { currentToken = currentToken.trim(); } while ((ch == Space) || (ch == Tab)) { ch = reader.read(); } if (returnNoValueAsNull && !currentTokenIsQuoted && currentToken.length() == 0) { tokens.add(null); } else { tokens.add(currentToken); } token.truncateToLength(0); if (ch == Comma) { state = StateBeginningOfField; ch = reader.read(); } else if (ch == LF || ch == CR) { lineNumberToConsume = lineNumber; state = StateBeginningOfLine; if (ch == LF) { lineNumber++; } for (; ch == LF || ch == CR;) { ch = reader.read(); if (ch == LF) { lineNumber++; } } if (ch == -1) { state = StateEOF; } } else if (ch == -1) { state = StateEOF; lineNumberToConsume = lineNumber; lineNumber++; } else { csvErrorHandler.handleError( ErrorMissingComma, location, lineNumber); state = StateBeginningOfField; } break; } case StateInUnquotedField: { while (ch >= 0 && ch != Comma && ch != CR && ch != LF) { token.append((char)ch); ch = reader.read(); } state = StateEndOfField; break; } case StateInQuotedField: { while (state == StateInQuotedField) { while (ch >= 0 && ch != DoubleQuote) { if (ch != CR) { token.append((char)ch); } ch = reader.read(); } /* A doubleQuote ends the quoted token, unless there are two in a row. Two doubleQuotes in a row is taken to mean a doubleQuote character value. 
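For example, the raw field "He said ""hi""" is consumed as the single token: He said "hi"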
*/ if (ch == DoubleQuote) { ch = reader.read(); if (ch == DoubleQuote) { token.append((char)ch); ch = reader.read(); } else { /* that was the matching quote */ break; } } else { csvErrorHandler.handleError( ErrorUnbalancedQuotes, location, lineNumber); break; } } state = StateEndOfField; break; } default: { state = StateEOF; break; } } } if (!tokens.isEmpty() && canConsume(lineNumberToConsume)) { csvConsumer.consumeLineOfTokens(location, lineNumberToConsume, tokens); } } catch (MalformedInputException e) { csvErrorHandler.handleError( ErrorIllegalCharacterOrByteSequence, location, lineNumber); throw e; } finally { reader.close(); } } /** @param lineNumberToConsume line number of the line being consumed @return true if the line can be consumed */ private boolean canConsume (int lineNumberToConsume) { return ((numberOfLines == AllLines) || (lineNumberToConsume <= numberOfLines)); } /** Reads all the lines of the specified input file into memory and returns a list of lists, each of which contains one line of input. Please note that the complete CSV file will be stored in memory. @param file the data source as passed in by one of the higher level read methods @param encoding the encoding to use to read the data @return a list of lists containing each line of input @aribaapi documented */ public static List readAllLines (File file, String encoding) throws IOException { return readAllLines(file, encoding, false); } /** Reads all the lines of the specified input file into memory and returns a list of lists, each of which contains one line of input. Please note that the complete CSV file will be stored in memory. @param file the data source as passed in by one of the higher level read methods @param encoding the encoding to use to read the data @param ignoreComments if true it ignores all lines starting with '#' @return a list of lists containing each line of input @aribaapi documented */ public static List readAllLines (File file, String encoding, boolean ignoreComments) throws IOException { return readAllLines(file, encoding, ignoreComments, null, false); } /** Reads all the lines of the specified input file into memory and returns a list of lists, each of which contains one line of input. Please note that the complete CSV file will be stored in memory. @param file the data source as passed in by one of the higher level read methods @param encoding the encoding to use to read the data @param ignoreComments if true it ignores all lines starting with '#' @param emptyValueAsNull see {@link #setReturnEmptyValueAsNull} @return a list of lists containing each line of input @aribaapi documented */ public static List readAllLines (File file, String encoding, boolean ignoreComments, boolean emptyValueAsNull) throws IOException { return readAllLines(file, encoding, ignoreComments, null, emptyValueAsNull); } /** Reads all the lines of the specified input file into memory and returns a list of lists, each of which contains one line of input. Please note that the complete CSV file will be stored in memory. @param file the data source as passed in by one of the higher level read methods @param encoding the encoding to use to read the data @param ignoreComments if true it ignores all lines starting with the commentMarker String @param commentMarker String that marks the beginning of a comment. It has to be at the beginning of a line.
If null is specified and ignoreComments is true, '#' is used as the comment marker @param emptyValueAsNull see {@link #setReturnEmptyValueAsNull} @return a list of lists containing each line of input @aribaapi documented */ public static List readAllLines (File file, String encoding, boolean ignoreComments, String commentMarker, boolean emptyValueAsNull) throws IOException { return readNLines(file, encoding, ignoreComments, commentMarker, emptyValueAsNull, AllLines); } /** @see #readAllLines(File, String, boolean, String, boolean) @param nLines number of lines to read */ public static List readNLines (File file, String encoding, boolean ignoreComments, String commentMarker, boolean emptyValueAsNull, int nLines) throws IOException { Assert.that(file != null, "File is null!"); if (!file.canRead()) { throw new FileNotFoundException(Fmt.S("Cannot read file %s", file)); } List allLines = ListUtil.list(); CSVConsumerHelper csvConsumerHelper = new CSVReader().new CSVConsumerHelper(allLines, ignoreComments, commentMarker); CSVReader reader = new CSVReader(csvConsumerHelper); reader.setNumberOfLines(nLines); reader.setReturnEmptyValueAsNull(emptyValueAsNull); if (encoding == null) { reader.readForSpecifiedEncoding(file); } else { reader.read(file, encoding); } return allLines; } /** Implements the CSVConsumer interface. @aribaapi private */ class CSVConsumerHelper implements CSVConsumer { private boolean _ignoreComments; private List _allLines; private CommentChecker _commentChecker = null; CSVConsumerHelper (List allLines, boolean ignoreComments, String commentMarker) { _allLines = allLines; _ignoreComments = ignoreComments; if (_ignoreComments) { _commentChecker = new CommentChecker(commentMarker); } } /** Implementation of callback specified by CSVConsumer. It accumulates the input lines. @param path the CSV source file @param lineNumber the current line being reported, 1-based. @param line a List of tokens parsed from one line of the file @aribaapi private */ public void consumeLineOfTokens (String path, int lineNumber, List line) { if (_ignoreComments) { if (_commentChecker.isComment(line)) { return; } } _allLines.add(line); } } /** Reads a CSV file and outputs the results. Useful for testing. @param args command-line arguments; arg[0] is the CSV filename, arg[1] is the encoding type @exception IOException I/O error occurred while reading the file @aribaapi private */ public static void main (String args[]) throws IOException { File f = new File(args[0]); String encoding = null; if (args.length == 2) { encoding = args[1]; } List v = CSVReader.readAllLines(f, encoding); for (int i=0 ; i<v.size(); i++) { Fmt.F(SystemUtil.out(), "Line %s has the following values:\n", i); List elements = (List)v.get(i); for (int j=0; j<elements.size(); j++) { Fmt.F(SystemUtil.out(), "\t%s\n", elements.get(j)); } } SystemUtil.exit(0); } } /** This class checks to see if a line (with its tokens represented as a List) is a comment. */ class CommentChecker { private static final String DefaultCommentMarker = "#"; /** commentMarker specifies what a comment is. */ private String commentMarker; /** Constructs an instance with the given commentMarker. @param commentMarker the string that indicates a comment. Lines beginning with this value will be treated as comments. If null, the default ('#') will be used. */ CommentChecker (String commentMarker) { this.commentMarker = (commentMarker == null) ? DefaultCommentMarker : commentMarker; } /** Checks to see if the given line is a comment.
@param line the given line @return true if this line is a comment, false otherwise. */ boolean isComment (List line) { Object first = ListUtil.firstElement(line); return ((first instanceof String) && ((String)first).startsWith(commentMarker)); } }
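/* A minimal usage sketch (assumed, not part of the original source): it streams a CSV file
   through CSVReader with a custom CSVConsumer instead of buffering every line via readAllLines().
   The CSVReader(CSVConsumer) constructor, read(Reader, String), and the consumeLineOfTokens
   signature all appear in the class above; the package names follow the @see tag, and
   TokenCountingConsumer itself is a hypothetical name. */
import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import ariba.util.io.CSVConsumer;
import ariba.util.io.CSVReader;

class TokenCountingConsumer implements CSVConsumer
{
    private int tokenCount = 0;

    /** Called once per parsed line; with setReturnEmptyValueAsNull(true) the list may contain nulls. */
    public void consumeLineOfTokens (String path, int lineNumber, List line)
    {
        tokenCount += line.size();
    }

    public static void main (String[] args) throws IOException
    {
        TokenCountingConsumer consumer = new TokenCountingConsumer();
        CSVReader reader = new CSVReader(consumer);
        // read() buffers the reader if necessary and closes it in its finally block
        reader.read(new FileReader(args[0]), args[0]);
        System.out.println("total tokens: " + consumer.tokenCount);
    }
}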
package apron.constraint; // Generated from Constraint.g4 by ANTLR 4.0 import org.antlr.v4.runtime.tree.*; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.ParserRuleContext; public class ConstraintBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements ConstraintVisitor<T> { @Override public T visitBoolean_expr(ConstraintParser.Boolean_exprContext ctx) { return visitChildren(ctx); } @Override public T visitFieldS(ConstraintParser.FieldSContext ctx) { return visitChildren(ctx); } @Override public T visitPriorityMin(ConstraintParser.PriorityMinContext ctx) { return visitChildren(ctx); } @Override public T visitFieldM(ConstraintParser.FieldMContext ctx) { return visitChildren(ctx); } @Override public T visitBind(ConstraintParser.BindContext ctx) { return visitChildren(ctx); } @Override public T visitSwitchLevel(ConstraintParser.SwitchLevelContext ctx) { return visitChildren(ctx); } @Override public T visitAssertExpr(ConstraintParser.AssertExprContext ctx) { return visitChildren(ctx); } @Override public T visitPhysicalTopo(ConstraintParser.PhysicalTopoContext ctx) { return visitChildren(ctx); } @Override public T visitActions(ConstraintParser.ActionsContext ctx) { return visitChildren(ctx); } @Override public T visitStatisticsS(ConstraintParser.StatisticsSContext ctx) { return visitChildren(ctx); } @Override public T visitAssertStmtM(ConstraintParser.AssertStmtMContext ctx) { return visitChildren(ctx); } @Override public T visitFlowLevel(ConstraintParser.FlowLevelContext ctx) { return visitChildren(ctx); } @Override public T visitFilterTermFactor(ConstraintParser.FilterTermFactorContext ctx) { return visitChildren(ctx); } @Override public T visitVarPerm(ConstraintParser.VarPermContext ctx) { return visitChildren(ctx); } @Override public T visitPermS(ConstraintParser.PermSContext ctx) { return visitChildren(ctx); } @Override public T visitOwnFlows(ConstraintParser.OwnFlowsContext ctx) { return visitChildren(ctx); } @Override public T visitFilterExprAndTerm(ConstraintParser.FilterExprAndTermContext ctx) { return visitChildren(ctx); } @Override public T visitAssertExclusive(ConstraintParser.AssertExclusiveContext ctx) { return visitChildren(ctx); } @Override public T visitBindList(ConstraintParser.BindListContext ctx) { return visitChildren(ctx); } @Override public T visitModifyEventOrder(ConstraintParser.ModifyEventOrderContext ctx) { return visitChildren(ctx); } @Override public T visitMaxPriority(ConstraintParser.MaxPriorityContext ctx) { return visitChildren(ctx); } @Override public T visitPermListM(ConstraintParser.PermListMContext ctx) { return visitChildren(ctx); } @Override public T visitAllPathsAsLinks(ConstraintParser.AllPathsAsLinksContext ctx) { return visitChildren(ctx); } @Override public T visitPermListS(ConstraintParser.PermListSContext ctx) { return visitChildren(ctx); } @Override public T visitFileDeny(ConstraintParser.FileDenyContext ctx) { return visitChildren(ctx); } @Override public T visitPortLevel(ConstraintParser.PortLevelContext ctx) { return visitChildren(ctx); } @Override public T visitExclusive(ConstraintParser.ExclusiveContext ctx) { return visitChildren(ctx); } @Override public T visitEventInterception(ConstraintParser.EventInterceptionContext ctx) { return visitChildren(ctx); } @Override public T visitAllSwitches(ConstraintParser.AllSwitchesContext ctx) { return visitChildren(ctx); } @Override public T visitBooleanExpr(ConstraintParser.BooleanExprContext ctx) { return visitChildren(ctx); } @Override public T 
visitDrop(ConstraintParser.DropContext ctx) { return visitChildren(ctx); } @Override public T visitValIp(ConstraintParser.ValIpContext ctx) { return visitChildren(ctx); } @Override public T visitSingleBigSwitch(ConstraintParser.SingleBigSwitchContext ctx) { return visitChildren(ctx); } @Override public T visitAssertNot(ConstraintParser.AssertNotContext ctx) { return visitChildren(ctx); } @Override public T visitSwIdxList(ConstraintParser.SwIdxListContext ctx) { return visitChildren(ctx); } @Override public T visitAssertStmtS(ConstraintParser.AssertStmtSContext ctx) { return visitChildren(ctx); } @Override public T visitBorderSwitches(ConstraintParser.BorderSwitchesContext ctx) { return visitChildren(ctx); } @Override public T visitPhysical_topo(ConstraintParser.Physical_topoContext ctx) { return visitChildren(ctx); } @Override public T visitPermM(ConstraintParser.PermMContext ctx) { return visitChildren(ctx); } @Override public T visitLink_idx(ConstraintParser.Link_idxContext ctx) { return visitChildren(ctx); } @Override public T visitBindApp(ConstraintParser.BindAppContext ctx) { return visitChildren(ctx); } @Override public T visitValInt(ConstraintParser.ValIntContext ctx) { return visitChildren(ctx); } @Override public T visitAssertOr(ConstraintParser.AssertOrContext ctx) { return visitChildren(ctx); } @Override public T visitPermExprUnion(ConstraintParser.PermExprUnionContext ctx) { return visitChildren(ctx); } @Override public T visitOthersFlows(ConstraintParser.OthersFlowsContext ctx) { return visitChildren(ctx); } @Override public T visitField(ConstraintParser.FieldContext ctx) { return visitChildren(ctx); } @Override public T visitLinkListS(ConstraintParser.LinkListSContext ctx) { return visitChildren(ctx); } @Override public T visitVirtualTopo(ConstraintParser.VirtualTopoContext ctx) { return visitChildren(ctx); } @Override public T visitFilterExprTerm(ConstraintParser.FilterExprTermContext ctx) { return visitChildren(ctx); } @Override public T visitLinkListM(ConstraintParser.LinkListMContext ctx) { return visitChildren(ctx); } @Override public T visitVirtualSwitchSetS(ConstraintParser.VirtualSwitchSetSContext ctx) { return visitChildren(ctx); } @Override public T visitWildcard(ConstraintParser.WildcardContext ctx) { return visitChildren(ctx); } @Override public T visitBindExpr(ConstraintParser.BindExprContext ctx) { return visitChildren(ctx); } @Override public T visitForward(ConstraintParser.ForwardContext ctx) { return visitChildren(ctx); } @Override public T visitNetworkDeny(ConstraintParser.NetworkDenyContext ctx) { return visitChildren(ctx); } @Override public T visitLinkM(ConstraintParser.LinkMContext ctx) { return visitChildren(ctx); } @Override public T visitVirtualSwitchSetM(ConstraintParser.VirtualSwitchSetMContext ctx) { return visitChildren(ctx); } @Override public T visitLinkS(ConstraintParser.LinkSContext ctx) { return visitChildren(ctx); } @Override public T visitVirtual_topo(ConstraintParser.Virtual_topoContext ctx) { return visitChildren(ctx); } @Override public T visitFieldMask(ConstraintParser.FieldMaskContext ctx) { return visitChildren(ctx); } @Override public T visitFlowTableB(ConstraintParser.FlowTableBContext ctx) { return visitChildren(ctx); } @Override public T visitFlowTableA(ConstraintParser.FlowTableAContext ctx) { return visitChildren(ctx); } @Override public T visitSystemS(ConstraintParser.SystemSContext ctx) { return visitChildren(ctx); } @Override public T visitFileAllow(ConstraintParser.FileAllowContext ctx) { return visitChildren(ctx); } 
@Override public T visitCmp_operator(ConstraintParser.Cmp_operatorContext ctx) { return visitChildren(ctx); } @Override public T visitVar_perm(ConstraintParser.Var_permContext ctx) { return visitChildren(ctx); } @Override public T visitAssertAnd(ConstraintParser.AssertAndContext ctx) { return visitChildren(ctx); } @Override public T visitProgram(ConstraintParser.ProgramContext ctx) { return visitChildren(ctx); } @Override public T visitSwIdxListS(ConstraintParser.SwIdxListSContext ctx) { return visitChildren(ctx); } @Override public T visitFieldVal(ConstraintParser.FieldValContext ctx) { return visitChildren(ctx); } @Override public T visitPktOut(ConstraintParser.PktOutContext ctx) { return visitChildren(ctx); } @Override public T visitFilterTermOrFactor(ConstraintParser.FilterTermOrFactorContext ctx) { return visitChildren(ctx); } @Override public T visitSwIdxListM(ConstraintParser.SwIdxListMContext ctx) { return visitChildren(ctx); } @Override public T visitTopology(ConstraintParser.TopologyContext ctx) { return visitChildren(ctx); } @Override public T visitAllFlows(ConstraintParser.AllFlowsContext ctx) { return visitChildren(ctx); } @Override public T visitNetworkAllow(ConstraintParser.NetworkAllowContext ctx) { return visitChildren(ctx); } @Override public T visitApp_name(ConstraintParser.App_nameContext ctx) { return visitChildren(ctx); } @Override public T visitPktOutAllow(ConstraintParser.PktOutAllowContext ctx) { return visitChildren(ctx); } @Override public T visitFlowTable(ConstraintParser.FlowTableContext ctx) { return visitChildren(ctx); } @Override public T visitOwnershipS(ConstraintParser.OwnershipSContext ctx) { return visitChildren(ctx); } @Override public T visitFilterFactorNotFactor(ConstraintParser.FilterFactorNotFactorContext ctx) { return visitChildren(ctx); } @Override public T visitFilterFactorNot(ConstraintParser.FilterFactorNotContext ctx) { return visitChildren(ctx); } @Override public T visitVirtualSwitchSet(ConstraintParser.VirtualSwitchSetContext ctx) { return visitChildren(ctx); } @Override public T visitFilterExpr(ConstraintParser.FilterExprContext ctx) { return visitChildren(ctx); } @Override public T visitPathM(ConstraintParser.PathMContext ctx) { return visitChildren(ctx); } @Override public T visitAssertList(ConstraintParser.AssertListContext ctx) { return visitChildren(ctx); } @Override public T visitPathS(ConstraintParser.PathSContext ctx) { return visitChildren(ctx); } @Override public T visitProcessAllow(ConstraintParser.ProcessAllowContext ctx) { return visitChildren(ctx); } @Override public T visitModify(ConstraintParser.ModifyContext ctx) { return visitChildren(ctx); } @Override public T visitAllDriectLinks(ConstraintParser.AllDriectLinksContext ctx) { return visitChildren(ctx); } @Override public T visitPriorityMax(ConstraintParser.PriorityMaxContext ctx) { return visitChildren(ctx); } @Override public T visitPerm_name(ConstraintParser.Perm_nameContext ctx) { return visitChildren(ctx); } @Override public T visitSw_idx(ConstraintParser.Sw_idxContext ctx) { return visitChildren(ctx); } @Override public T visitModifyField(ConstraintParser.ModifyFieldContext ctx) { return visitChildren(ctx); } @Override public T visitPktOutDeny(ConstraintParser.PktOutDenyContext ctx) { return visitChildren(ctx); } @Override public T visitProcessDeny(ConstraintParser.ProcessDenyContext ctx) { return visitChildren(ctx); } @Override public T visitLinkList(ConstraintParser.LinkListContext ctx) { return visitChildren(ctx); } @Override public T 
visitNotificationS(ConstraintParser.NotificationSContext ctx) { return visitChildren(ctx); } @Override public T visitFlowPredicate(ConstraintParser.FlowPredicateContext ctx) { return visitChildren(ctx); } }
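/* A brief sketch (assumed, not part of the generated output) of how a generated base visitor like
   the one above is typically specialized: extend it, override only the rules of interest, and let
   visitChildren() handle everything else. DropContext and ForwardContext are rule contexts of the
   generated ConstraintParser above; ActionCountingVisitor is a hypothetical name. */
class ActionCountingVisitor extends ConstraintBaseVisitor<Integer> {
    private int drops = 0;
    private int forwards = 0;

    @Override public Integer visitDrop(ConstraintParser.DropContext ctx) {
        drops++; // count each 'drop' action in the parse tree
        return visitChildren(ctx);
    }

    @Override public Integer visitForward(ConstraintParser.ForwardContext ctx) {
        forwards++; // count each 'forward' action
        return visitChildren(ctx);
    }

    int totalActions() { return drops + forwards; }
}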
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.testframework.configvariations; import java.util.Collection; import javax.cache.Cache; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.configuration.Factory; import javax.cache.configuration.MutableCacheEntryListenerConfiguration; import javax.cache.event.CacheEntryCreatedListener; import javax.cache.event.CacheEntryEventFilter; import javax.cache.event.CacheEntryListener; import javax.cache.event.CacheEntryListenerException; import org.apache.ignite.cache.CacheAtomicWriteOrderMode; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheInterceptorAdapter; import org.apache.ignite.cache.CacheMemoryMode; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.CacheRebalanceMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.fair.FairAffinityFunction; import org.apache.ignite.cache.eviction.EvictionFilter; import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicy; import org.apache.ignite.cache.store.CacheStoreSession; import org.apache.ignite.cache.store.CacheStoreSessionListener; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.configuration.TopologyValidator; import org.apache.ignite.internal.binary.BinaryMarshaller; import org.apache.ignite.marshaller.optimized.OptimizedMarshaller; import org.apache.ignite.spi.swapspace.inmemory.GridTestSwapSpaceSpi; import org.apache.ignite.testframework.junits.IgniteCacheConfigVariationsAbstractTest; import static org.apache.ignite.internal.util.lang.GridFunc.asArray; /** * Cache configuration variations. 
*/ @SuppressWarnings("serial") public class ConfigVariations { /** */ private static final ConfigParameter<Object> EVICTION_PARAM = Parameters.complexParameter( Parameters.parameter("setEvictionPolicy", Parameters.factory(FifoEvictionPolicy.class)), Parameters.parameter("setEvictionFilter", Parameters.factory(NoopEvictionFilter.class)) ); /** */ private static final ConfigParameter<Object> CACHE_STORE_PARAM = Parameters.complexParameter( Parameters.parameter("setCacheStoreFactory", Parameters.factory(IgniteCacheConfigVariationsAbstractTest.TestStoreFactory.class)), Parameters.parameter("setReadThrough", true), Parameters.parameter("setWriteThrough", true), Parameters.parameter("setCacheStoreSessionListenerFactories", noopCacheStoreSessionListenerFactory()) ); /** */ private static final ConfigParameter<Object> SIMPLE_CACHE_STORE_PARAM = Parameters.complexParameter( Parameters.parameter("setCacheStoreFactory", Parameters.factory(IgniteCacheConfigVariationsAbstractTest.TestStoreFactory.class)), Parameters.parameter("setReadThrough", true), Parameters.parameter("setWriteThrough", true) ); /** */ private static final ConfigParameter<Object> REBALANCING_PARAM = Parameters.complexParameter( Parameters.parameter("setRebalanceBatchSize", 2028 * 1024), Parameters.parameter("setRebalanceBatchesPrefetchCount", 5L), Parameters.parameter("setRebalanceThreadPoolSize", 5), Parameters.parameter("setRebalanceTimeout", CacheConfiguration.DFLT_REBALANCE_TIMEOUT * 2), Parameters.parameter("setRebalanceDelay", 1000L) ); /** */ private static final ConfigParameter<Object> ONHEAP_TIERED_MEMORY_PARAM = Parameters.parameter("setMemoryMode", CacheMemoryMode.ONHEAP_TIERED); /** */ private static final ConfigParameter<Object> OFFHEAP_TIERED_MEMORY_PARAM = Parameters.parameter("setMemoryMode", CacheMemoryMode.OFFHEAP_TIERED); /** */ private static final ConfigParameter<Object> OFFHEAP_VALUES_MEMORY_PARAM = Parameters.parameter("setMemoryMode", CacheMemoryMode.OFFHEAP_VALUES); /** */ private static final ConfigParameter<Object> OFFHEAP_ENABLED = Parameters.parameter("setOffHeapMaxMemory", 10 * 1024 * 1024L); /** */ @SuppressWarnings("unchecked") private static final ConfigParameter<IgniteConfiguration>[][] BASIC_IGNITE_SET = new ConfigParameter[][] { Parameters.objectParameters("setMarshaller", Parameters.factory(BinaryMarshaller.class), optimizedMarshallerFactory()), Parameters.booleanParameters("setPeerClassLoadingEnabled"), Parameters.objectParameters("setSwapSpaceSpi", Parameters.factory(GridTestSwapSpaceSpi.class)), }; /** */ @SuppressWarnings("unchecked") private static final ConfigParameter<CacheConfiguration>[][] BASIC_CACHE_SET = new ConfigParameter[][] { Parameters.objectParameters("setCacheMode", CacheMode.REPLICATED, CacheMode.PARTITIONED), Parameters.enumParameters("setAtomicityMode", CacheAtomicityMode.class), Parameters.enumParameters("setMemoryMode", CacheMemoryMode.class), // Set default parameters. 
Parameters.objectParameters("setLoadPreviousValue", true), Parameters.objectParameters("setSwapEnabled", true), asArray(SIMPLE_CACHE_STORE_PARAM), Parameters.objectParameters("setWriteSynchronizationMode", CacheWriteSynchronizationMode.FULL_SYNC), Parameters.objectParameters("setAtomicWriteOrderMode", CacheAtomicWriteOrderMode.PRIMARY), Parameters.objectParameters("setStartSize", 1024), }; /** */ @SuppressWarnings("unchecked") private static final ConfigParameter<CacheConfiguration>[][] FULL_CACHE_SET = new ConfigParameter[][] { Parameters.enumParameters("setCacheMode", CacheMode.class), Parameters.enumParameters("setAtomicityMode", CacheAtomicityMode.class), asArray(ONHEAP_TIERED_MEMORY_PARAM, Parameters.complexParameter(ONHEAP_TIERED_MEMORY_PARAM, OFFHEAP_ENABLED), Parameters.complexParameter(OFFHEAP_TIERED_MEMORY_PARAM, OFFHEAP_ENABLED), Parameters.complexParameter(OFFHEAP_VALUES_MEMORY_PARAM, OFFHEAP_ENABLED) ), Parameters.booleanParameters("setLoadPreviousValue"), Parameters.booleanParameters("setReadFromBackup"), Parameters.booleanParameters("setStoreKeepBinary"), Parameters.objectParameters("setRebalanceMode", CacheRebalanceMode.SYNC, CacheRebalanceMode.ASYNC), Parameters.booleanParameters("setSwapEnabled"), Parameters.booleanParameters("setCopyOnRead"), Parameters.objectParameters(true, "setNearConfiguration", nearCacheConfigurationFactory()), asArray(null, Parameters.complexParameter( EVICTION_PARAM, CACHE_STORE_PARAM, REBALANCING_PARAM, Parameters.parameter("setAffinity", Parameters.factory(FairAffinityFunction.class)), Parameters.parameter("setInterceptor", Parameters.factory(NoopInterceptor.class)), Parameters.parameter("setTopologyValidator", Parameters.factory(NoopTopologyValidator.class)), Parameters.parameter("addCacheEntryListenerConfiguration", Parameters.factory(EmptyCacheEntryListenerConfiguration.class)) ) ), // Set default parameters. Parameters.objectParameters("setWriteSynchronizationMode", CacheWriteSynchronizationMode.FULL_SYNC), Parameters.objectParameters("setAtomicWriteOrderMode", CacheAtomicWriteOrderMode.PRIMARY), Parameters.objectParameters("setStartSize", 1024), }; /** * Private constructor. */ private ConfigVariations() { // No-op. } /** * @return Custom near cache config. */ private static Factory nearCacheConfigurationFactory() { return new Factory() { @Override public Object create() { NearCacheConfiguration cfg = new NearCacheConfiguration<>(); cfg.setNearEvictionPolicy(new FifoEvictionPolicy()); return cfg; } }; } /** * @return Noop cache store session listener factory. */ private static Factory noopCacheStoreSessionListenerFactory() { return new Factory() { @Override public Object create() { return new Factory[] {new NoopCacheStoreSessionListenerFactory()}; } }; } /** * @return Default matrix of availiable variations. */ public static ConfigParameter<CacheConfiguration>[][] cacheBasicSet() { return BASIC_CACHE_SET; } /** * @return Full matrix of availiable variations. */ public static ConfigParameter<CacheConfiguration>[][] cacheFullSet() { return FULL_CACHE_SET; } /** * @return Default matrix of availiable variations. */ public static ConfigParameter<IgniteConfiguration>[][] igniteBasicSet() { return BASIC_IGNITE_SET; } /** * @return Marshaller. 
*/ private static Factory<OptimizedMarshaller> optimizedMarshallerFactory() { return new Factory<OptimizedMarshaller>() { @Override public OptimizedMarshaller create() { OptimizedMarshaller marsh = new OptimizedMarshaller(true); marsh.setRequireSerializable(false); return marsh; } }; } /** * */ public static class NoopEvictionFilter implements EvictionFilter { /** */ private static final long serialVersionUID = 0; /** {@inheritDoc} */ @Override public boolean evictAllowed(Cache.Entry entry) { return true; } } /** * */ public static class NoopInterceptor extends CacheInterceptorAdapter { /** */ private static final long serialVersionUID = 0L; // No-op. } /** * */ public static class NoopCacheStoreSessionListenerFactory implements Factory<NoopCacheStoreSessionListener> { /** Serial version uid. */ private static final long serialVersionUID = 0L; /** {@inheritDoc} */ @Override public NoopCacheStoreSessionListener create() { return new NoopCacheStoreSessionListener(); } } /** * */ public static class NoopCacheStoreSessionListener implements CacheStoreSessionListener { /** {@inheritDoc} */ @Override public void onSessionStart(CacheStoreSession ses) { // No-op. } /** {@inheritDoc} */ @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) { // No-op. } } /** * */ public static class NoopTopologyValidator implements TopologyValidator { /** */ private static final long serialVersionUID = 0L; /** {@inheritDoc} */ @Override public boolean validate(Collection<ClusterNode> nodes) { return true; } } /** * */ @SuppressWarnings({"serial", "unchecked"}) public static class EmptyCacheEntryListenerConfiguration extends MutableCacheEntryListenerConfiguration { /** * */ public EmptyCacheEntryListenerConfiguration() { super(new NoopCacheEntryListenerConfiguration()); } } /** * */ @SuppressWarnings("serial") public static class NoopCacheEntryListenerConfiguration implements CacheEntryListenerConfiguration { /** {@inheritDoc} */ @Override public Factory<CacheEntryListener> getCacheEntryListenerFactory() { return new Factory<CacheEntryListener>() { @Override public CacheEntryListener create() { return new NoopCacheEntryListener(); } }; } /** {@inheritDoc} */ @Override public boolean isOldValueRequired() { return false; } /** {@inheritDoc} */ @Override public Factory<CacheEntryEventFilter> getCacheEntryEventFilterFactory() { return null; } /** {@inheritDoc} */ @Override public boolean isSynchronous() { return false; } } /** * */ public static class NoopCacheEntryListener implements CacheEntryCreatedListener { /** {@inheritDoc} */ @Override public void onCreated(Iterable iterable) throws CacheEntryListenerException { // No-op. } } }
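/* A hedged sketch of how a test might assemble its own, smaller variation matrix in the style of
   the sets above. Every factory method used here (objectParameters, booleanParameters,
   complexParameter, parameter, factory, asArray) appears in ConfigVariations itself; the package
   locations are inferred from the surrounding file, and the particular combination plus the class
   name MyCacheVariations are illustrative only. */
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicy;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.testframework.configvariations.ConfigParameter;
import org.apache.ignite.testframework.configvariations.ConfigVariations;
import org.apache.ignite.testframework.configvariations.Parameters;
import static org.apache.ignite.internal.util.lang.GridFunc.asArray;

class MyCacheVariations {
    /** Two cache modes x two backup-read settings x one compound eviction parameter = 4 variations. */
    @SuppressWarnings("unchecked")
    static final ConfigParameter<CacheConfiguration>[][] SMALL_CACHE_SET = new ConfigParameter[][] {
        // Vary the cache mode across two values.
        Parameters.objectParameters("setCacheMode", CacheMode.REPLICATED, CacheMode.PARTITIONED),
        // Toggle reading from backups on and off.
        Parameters.booleanParameters("setReadFromBackup"),
        // Pin eviction policy and filter together as a single compound parameter.
        asArray(Parameters.complexParameter(
            Parameters.parameter("setEvictionPolicy", Parameters.factory(FifoEvictionPolicy.class)),
            Parameters.parameter("setEvictionFilter", Parameters.factory(ConfigVariations.NoopEvictionFilter.class))
        )),
    };
}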
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools.mapred; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.EnumSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.tools.DistCpConstants; import org.apache.hadoop.tools.DistCpOptions.FileAttribute; import org.apache.hadoop.tools.mapred.CopyMapper.FileAction; import org.apache.hadoop.tools.util.DistCpUtils; import org.apache.hadoop.tools.util.RetriableCommand; import org.apache.hadoop.tools.util.ThrottledInputStream; import com.google.common.annotations.VisibleForTesting; /** * This class extends RetriableCommand to implement the copy of files, * with retries on failure. */ public class RetriableFileCopyCommand extends RetriableCommand { private static Log LOG = LogFactory.getLog(RetriableFileCopyCommand.class); private static int BUFFER_SIZE = 8 * 1024; private boolean skipCrc = false; private FileAction action; /** * Constructor, taking a description of the action. * @param description Verbose description of the copy operation. * @param action The action to perform (overwrite or append). */ public RetriableFileCopyCommand(String description, FileAction action) { super(description); this.action = action; } /** * Create a RetriableFileCopyCommand. * * @param skipCrc Whether to skip the crc check. * @param description A verbose description of the copy operation. * @param action Whether to overwrite the target file or append new data to it. */ public RetriableFileCopyCommand(boolean skipCrc, String description, FileAction action) { this(description, action); this.skipCrc = skipCrc; } /** * Implementation of RetriableCommand::doExecute(). * This is the actual copy-implementation. * @param arguments Argument-list to the command. * @return Number of bytes copied. * @throws Exception */ @SuppressWarnings("unchecked") @Override protected Object doExecute(Object... arguments) throws Exception { assert arguments.length == 4 : "Unexpected argument list."; FileStatus source = (FileStatus)arguments[0]; assert !source.isDirectory() : "Unexpected file-status.
Expected file."; Path target = (Path)arguments[1]; Mapper.Context context = (Mapper.Context)arguments[2]; EnumSet<FileAttribute> fileAttributes = (EnumSet<FileAttribute>)arguments[3]; return doCopy(source, target, context, fileAttributes); } private long doCopy(FileStatus sourceFileStatus, Path target, Mapper.Context context, EnumSet<FileAttribute> fileAttributes) throws IOException { final boolean toAppend = action == FileAction.APPEND; Path targetPath = toAppend ? target : getTmpFile(target, context); final Configuration configuration = context.getConfiguration(); FileSystem targetFS = target.getFileSystem(configuration); try { if (LOG.isDebugEnabled()) { LOG.debug("Copying " + sourceFileStatus.getPath() + " to " + target); LOG.debug("Target file path: " + targetPath); } final Path sourcePath = sourceFileStatus.getPath(); final FileSystem sourceFS = sourcePath.getFileSystem(configuration); final FileChecksum sourceChecksum = fileAttributes .contains(FileAttribute.CHECKSUMTYPE) ? sourceFS .getFileChecksum(sourcePath) : null; final long offset = action == FileAction.APPEND ? targetFS.getFileStatus( target).getLen() : 0; long bytesRead = copyToFile(targetPath, targetFS, sourceFileStatus, offset, context, fileAttributes, sourceChecksum); compareFileLengths(sourceFileStatus, targetPath, configuration, bytesRead + offset); //At this point, src&dest lengths are same. if length==0, we skip checksum if ((bytesRead != 0) && (!skipCrc)) { compareCheckSums(sourceFS, sourceFileStatus.getPath(), sourceChecksum, targetFS, targetPath); } // it's not append case, thus we first write to a temporary file, rename // it to the target path. if (!toAppend) { promoteTmpToTarget(targetPath, target, targetFS); } return bytesRead; } finally { // note that for append case, it is possible that we append partial data // and then fail. 
In that case, for the next retry, we either reuse the // partial appended data if it is good or we overwrite the whole file if (!toAppend && targetFS.exists(targetPath)) { targetFS.delete(targetPath, false); } } } /** * @return the checksum spec of the source checksum if checksum type should be * preserved */ private ChecksumOpt getChecksumOpt(EnumSet<FileAttribute> fileAttributes, FileChecksum sourceChecksum) { if (fileAttributes.contains(FileAttribute.CHECKSUMTYPE) && sourceChecksum != null) { return sourceChecksum.getChecksumOpt(); } return null; } private long copyToFile(Path targetPath, FileSystem targetFS, FileStatus sourceFileStatus, long sourceOffset, Mapper.Context context, EnumSet<FileAttribute> fileAttributes, final FileChecksum sourceChecksum) throws IOException { FsPermission permission = FsPermission.getFileDefault().applyUMask( FsPermission.getUMask(targetFS.getConf())); final OutputStream outStream; if (action == FileAction.OVERWRITE) { final short repl = getReplicationFactor(fileAttributes, sourceFileStatus, targetFS, targetPath); final long blockSize = getBlockSize(fileAttributes, sourceFileStatus, targetFS, targetPath); FSDataOutputStream out = targetFS.create(targetPath, permission, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), BUFFER_SIZE, repl, blockSize, context, getChecksumOpt(fileAttributes, sourceChecksum)); outStream = new BufferedOutputStream(out); } else { outStream = new BufferedOutputStream(targetFS.append(targetPath, BUFFER_SIZE)); } return copyBytes(sourceFileStatus, sourceOffset, outStream, BUFFER_SIZE, context); } private void compareFileLengths(FileStatus sourceFileStatus, Path target, Configuration configuration, long targetLen) throws IOException { final Path sourcePath = sourceFileStatus.getPath(); FileSystem fs = sourcePath.getFileSystem(configuration); long srcLen = fs.getFileStatus(sourcePath).getLen(); if (srcLen != targetLen) throw new IOException("Mismatch in length of source:" + sourcePath + " (" + srcLen + ") and target:" + target + " (" + targetLen + ")"); } private void compareCheckSums(FileSystem sourceFS, Path source, FileChecksum sourceChecksum, FileSystem targetFS, Path target) throws IOException { if (!DistCpUtils.checksumsAreEqual(sourceFS, source, sourceChecksum, targetFS, target)) { StringBuilder errorMessage = new StringBuilder("Check-sum mismatch between ") .append(source).append(" and ").append(target).append("."); if (sourceFS.getFileStatus(source).getBlockSize() != targetFS.getFileStatus(target).getBlockSize()) { errorMessage.append(" Source and target differ in block-size.") .append(" Use -pb to preserve block-sizes during copy.") .append(" Alternatively, skip checksum-checks altogether, using -skipCrc.") .append(" (NOTE: By skipping checksums, one runs the risk of " + "masking data-corruption during file-transfer.)"); } throw new IOException(errorMessage.toString()); } } //If target file exists and unable to delete target - fail //If target doesn't exist and unable to create parent folder - fail //If target is successfully deleted and parent exists, if rename fails - fail private void promoteTmpToTarget(Path tmpTarget, Path target, FileSystem fs) throws IOException { if ((fs.exists(target) && !fs.delete(target, false)) || (!fs.exists(target.getParent()) && !fs.mkdirs(target.getParent())) || !fs.rename(tmpTarget, target)) { throw new IOException("Failed to promote tmp-file:" + tmpTarget + " to: " + target); } } private Path getTmpFile(Path target, Mapper.Context context) { Path targetWorkPath = new 
Path(context.getConfiguration(). get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH)); Path root = target.equals(targetWorkPath)? targetWorkPath.getParent() : targetWorkPath; LOG.info("Creating temp file: " + new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString())); return new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString()); } @VisibleForTesting long copyBytes(FileStatus sourceFileStatus, long sourceOffset, OutputStream outStream, int bufferSize, Mapper.Context context) throws IOException { Path source = sourceFileStatus.getPath(); byte buf[] = new byte[bufferSize]; ThrottledInputStream inStream = null; long totalBytesRead = 0; try { inStream = getInputStream(source, context.getConfiguration()); int bytesRead = readBytes(inStream, buf, sourceOffset); while (bytesRead >= 0) { totalBytesRead += bytesRead; if (action == FileAction.APPEND) { sourceOffset += bytesRead; } outStream.write(buf, 0, bytesRead); updateContextStatus(totalBytesRead, context, sourceFileStatus); bytesRead = readBytes(inStream, buf, sourceOffset); } outStream.close(); outStream = null; } finally { IOUtils.cleanup(LOG, outStream, inStream); } return totalBytesRead; } private void updateContextStatus(long totalBytesRead, Mapper.Context context, FileStatus sourceFileStatus) { StringBuilder message = new StringBuilder(DistCpUtils.getFormatter() .format(totalBytesRead * 100.0f / sourceFileStatus.getLen())); message.append("% ") .append(description).append(" [") .append(DistCpUtils.getStringDescriptionFor(totalBytesRead)) .append('/') .append(DistCpUtils.getStringDescriptionFor(sourceFileStatus.getLen())) .append(']'); context.setStatus(message.toString()); } private static int readBytes(ThrottledInputStream inStream, byte buf[], long position) throws IOException { try { if (position == 0) { return inStream.read(buf); } else { return inStream.read(position, buf, 0, buf.length); } } catch (IOException e) { throw new CopyReadException(e); } } private static ThrottledInputStream getInputStream(Path path, Configuration conf) throws IOException { try { FileSystem fs = path.getFileSystem(conf); long bandwidthMB = conf.getInt(DistCpConstants.CONF_LABEL_BANDWIDTH_MB, DistCpConstants.DEFAULT_BANDWIDTH_MB); FSDataInputStream in = fs.open(path); return new ThrottledInputStream(in, bandwidthMB * 1024 * 1024); } catch (IOException e) { throw new CopyReadException(e); } } private static short getReplicationFactor( EnumSet<FileAttribute> fileAttributes, FileStatus sourceFile, FileSystem targetFS, Path tmpTargetPath) { return fileAttributes.contains(FileAttribute.REPLICATION)? sourceFile.getReplication() : targetFS.getDefaultReplication(tmpTargetPath); } /** * @return the block size of the source file if we need to preserve either * the block size or the checksum type. Otherwise the default block * size of the target FS. */ private static long getBlockSize( EnumSet<FileAttribute> fileAttributes, FileStatus sourceFile, FileSystem targetFS, Path tmpTargetPath) { boolean preserve = fileAttributes.contains(FileAttribute.BLOCKSIZE) || fileAttributes.contains(FileAttribute.CHECKSUMTYPE); return preserve ? sourceFile.getBlockSize() : targetFS .getDefaultBlockSize(tmpTargetPath); } /** * Special subclass of IOException. This is used to distinguish read-operation * failures from other kinds of IOExceptions. * The failure to read from source is dealt with specially, in the CopyMapper. * Such failures may be skipped if the DistCpOptions indicate so. * Write failures are intolerable, and amount to CopyMapper failure. 
*/ public static class CopyReadException extends IOException { public CopyReadException(Throwable rootCause) { super(rootCause); } } }
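/* The non-append path above deliberately writes to a temporary file and only renames it into
   place once the length and checksum checks pass, so a failed copy never leaves a truncated
   target behind. A self-contained illustration of the same write-then-promote pattern using
   java.nio (a sketch, not DistCp's own API; writeThenPromote is a hypothetical helper): */
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

class PromoteExample {
    static void writeThenPromote(byte[] data, Path target) throws IOException {
        Files.createDirectories(target.getParent());
        // Write to a sibling temp file first, mirroring getTmpFile() above.
        Path tmp = target.resolveSibling(target.getFileName() + ".distcp.tmp");
        Files.write(tmp, data);
        // Promote by rename; atomic on most local filesystems, so readers never
        // observe a partially written target.
        Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING);
    }
}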
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.client; import java.util.HashSet; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.client.ClientConsumer; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClientProducer; import org.apache.activemq.artemis.api.core.client.ClientSession; import org.apache.activemq.artemis.api.core.client.ClientSessionFactory; import org.apache.activemq.artemis.api.core.client.MessageHandler; import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger; import org.apache.activemq.artemis.tests.util.ActiveMQTestBase; import org.apache.activemq.artemis.utils.RandomUtil; import org.junit.Before; import org.junit.Test; public class MessageConcurrencyTest extends ActiveMQTestBase { private static final IntegrationTestLogger log = IntegrationTestLogger.LOGGER; private ActiveMQServer server; private final SimpleString ADDRESS = new SimpleString("MessageConcurrencyTestAddress"); private final SimpleString QUEUE_NAME = new SimpleString("MessageConcurrencyTestQueue"); private ServerLocator locator; @Override @Before public void setUp() throws Exception { super.setUp(); server = createServer(false); server.start(); locator = createInVMNonHALocator(); } // Test that a created message can be sent via multiple producers on different sessions concurrently @Test public void testMessageConcurrency() throws Exception { ClientSessionFactory sf = createSessionFactory(locator); ClientSession createSession = sf.createSession(); Set<ClientSession> sendSessions = new HashSet<>(); Set<Sender> senders = new HashSet<>(); final int numSessions = 100; final int numMessages = 1000; for (int i = 0; i < numSessions; i++) { ClientSession sendSession = sf.createSession(); sendSessions.add(sendSession); ClientProducer producer = sendSession.createProducer(ADDRESS); Sender sender = new Sender(numMessages, producer); senders.add(sender); sender.start(); } for (int i = 0; i < numMessages; i++) { byte[] body = RandomUtil.randomBytes(1000); ClientMessage message = createSession.createMessage(false); message.getBodyBuffer().writeBytes(body); for (Sender sender : senders) { sender.queue.add(message); } } for (Sender sender : senders) { sender.join(); assertFalse(sender.failed); } for (ClientSession sendSession : sendSessions) { sendSession.close(); } createSession.close(); sf.close(); } // Test that a created message can be sent via multiple producers after 
being consumed from a single consumer @Test public void testMessageConcurrencyAfterConsumption() throws Exception { ClientSessionFactory sf = createSessionFactory(locator); ClientSession consumeSession = sf.createSession(); final ClientProducer mainProducer = consumeSession.createProducer(ADDRESS); consumeSession.createQueue(ADDRESS, QUEUE_NAME); ClientConsumer consumer = consumeSession.createConsumer(QUEUE_NAME); consumeSession.start(); Set<ClientSession> sendSessions = new HashSet<>(); final Set<Sender> senders = new HashSet<>(); final int numSessions = 100; final int numMessages = 1000; for (int i = 0; i < numSessions; i++) { ClientSession sendSession = sf.createSession(); sendSessions.add(sendSession); ClientProducer producer = sendSession.createProducer(ADDRESS); Sender sender = new Sender(numMessages, producer); senders.add(sender); sender.start(); } consumer.setMessageHandler(new MessageHandler() { @Override public void onMessage(ClientMessage message) { for (Sender sender : senders) { sender.queue.add(message); } } }); for (int i = 0; i < numMessages; i++) { byte[] body = RandomUtil.randomBytes(1000); ClientMessage message = consumeSession.createMessage(false); message.getBodyBuffer().writeBytes(body); mainProducer.send(message); } for (Sender sender : senders) { sender.join(); assertFalse(sender.failed); } for (ClientSession sendSession : sendSessions) { sendSession.close(); } consumer.close(); consumeSession.deleteQueue(QUEUE_NAME); consumeSession.close(); sf.close(); } private class Sender extends Thread { private final BlockingQueue<ClientMessage> queue = new LinkedBlockingQueue<>(); private final ClientProducer producer; private final int numMessages; Sender(final int numMessages, final ClientProducer producer) { this.numMessages = numMessages; this.producer = producer; } volatile boolean failed; @Override public void run() { try { for (int i = 0; i < numMessages; i++) { ClientMessage msg = queue.take(); producer.send(msg); } } catch (Exception e) { log.error("Failed to send message", e); failed = true; } } } }
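/* The Sender thread above stops after a fixed message count that both sides must agree on. A
   common alternative, sketched here with plain java.util.concurrent types (not part of the test;
   PillWorker is a hypothetical name), is to terminate the worker with a sentinel "poison pill"
   so the producing side decides when the stream ends: */
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

class PillWorker extends Thread {
    static final Object PILL = new Object(); // sentinel marking end-of-stream
    final BlockingQueue<Object> queue = new LinkedBlockingQueue<>();

    @Override public void run() {
        try {
            Object item;
            // Drain the queue until the sentinel arrives, then exit cleanly.
            while ((item = queue.take()) != PILL) {
                process(item);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }

    void process(Object item) { /* send, log, etc. */ }
}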
package LBJ2.IR; import java.util.Iterator; import java.util.LinkedList; import LBJ2.Pass; import LBJ2.frontend.TokenValue; /** * Represents an inference specification. * * @author Nick Rizzolo **/ public class InferenceDeclaration extends Declaration implements LBJ2.CodeGenerator { /** If no inference algorithm is specified, this algorithm is used. */ public static final InstanceCreationExpression defaultInferenceConstructor = new InstanceCreationExpression( new Name("ILPInference"), new ExpressionList( new InstanceCreationExpression( new Name("GLPKHook"), new ExpressionList(), -1, -1)), -1, -1); /** * (&not;&oslash;) A specification of the object from which all variables * can be found. **/ public Argument head; /** * (&not;&oslash;) The methods used to find the head object given objects * of different types. **/ public HeadFinder[] headFinders; /** * (&not;&oslash;) Declarations describing how the scores produced by * various learning classifiers should be normalized. **/ public NormalizerDeclaration[] normalizerDeclarations; /** * (&not;&oslash;) The constraint that must be respected during * optimization. **/ public ConstraintDeclaration constraint; /** * Counts the number of <code>subjectto</code> clauses for error detection. **/ public int subjecttoClauses; /** (&oslash;) A constructor for the inference algorithm to use. */ public InstanceCreationExpression algorithm; /** Counts the number of <code>with</code> clauses for error detection. */ public int withClauses; /** * Full constructor. * * @param com A Javadoc comment associated with the declaration. * @param line The line on which the source code represented by this * node is found. * @param byteOffset The byte offset from the beginning of the source file * at which the source code represented by this node is * found. * @param n The inference's name. * @param h The specification of the head object. * @param f An array of methods used to find the head object. * @param d An array of normalizer declarations. * @param con The constraint this inference must respect. * @param a A constructor for the inference algorithm. **/ public InferenceDeclaration(String com, int line, int byteOffset, Name n, Argument h, HeadFinder[] f, NormalizerDeclaration[] d, ConstraintDeclaration con, InstanceCreationExpression a) { super(com, n, line, byteOffset); head = h; headFinders = f; if (headFinders == null) headFinders = new HeadFinder[0]; normalizerDeclarations = d; if (normalizerDeclarations == null) normalizerDeclarations = new NormalizerDeclaration[0]; if (con == null) { constraint = new ConstraintDeclaration( null, -1, -1, new Name(name + "$subjectto"), h, new Block( new StatementList( new ExpressionStatement( new ConstraintStatementExpression( new ConstraintEqualityExpression( new Operator(Operator.CONSTRAINT_EQUAL), new Constant("true"), new Constant("true"))))))); } else constraint = con; subjecttoClauses = 1; algorithm = a; withClauses = algorithm == null ? 0 : 1; } /** * Parser's constructor. Line and byte offset information is taken from * the first token. * * @param t The first token indicates line and byte offset information. * @param i The identifier token representing the classifier's name. * @param h The specification of the head object. * @param c A list of clauses from the body of the declaration. 
**/ public InferenceDeclaration(TokenValue t, TokenValue i, Argument h, LinkedList c) { this(null, t.line, t.byteOffset, new Name(i), h, null, null, null, null); subjecttoClauses = 0; LinkedList finders = new LinkedList(); LinkedList normalizers = new LinkedList(); for (Iterator I = c.iterator(); I.hasNext(); ) { Clause clause = (Clause) I.next(); if (clause.type == Clause.HEAD_FINDER) finders.add(clause.argument); else if (clause.type == Clause.SUBJECTTO) { Block b = (Block) clause.argument; constraint = new ConstraintDeclaration(null, b.line, b.byteOffset, new Name(name + "$subjectto"), h, b); ++subjecttoClauses; } else if (clause.type == Clause.WITH) { algorithm = (InstanceCreationExpression) clause.argument; ++withClauses; } else if (clause.type == Clause.NORMALIZER_DECLARATION) normalizers.add(clause.argument); } headFinders = (HeadFinder[]) finders.toArray(new HeadFinder[finders.size()]); normalizerDeclarations = (NormalizerDeclaration[]) normalizers.toArray(new NormalizerDeclaration[normalizers.size()]); } /** * Returns <code>true</code> iff at least one of the normalizer * declarations is specific to a given type. **/ public boolean containsTypeSpecificNormalizer() { for (int i = 0; i < normalizerDeclarations.length; ++i) if (normalizerDeclarations[i].learner != null) return true; return false; } /** * Returns the type of the declaration. * * @return The type of the declaration. **/ public Type getType() { return new InferenceType(head.getType(), headFinders); } /** Returns the name of the <code>InferenceDeclaration</code>. */ public String getName() { return name.toString(); } /** * Returns the line number on which this AST node is found in the source * (starting from line 0). This method exists to fulfill the * implementation of <code>CodeGenerator</code>. * @see LBJ2.CodeGenerator **/ public int getLine() { return line; } /** * Returns a shallow textual representation of this AST node. The * difference between the result of this method and the result of * <code>write(StringBuffer)</code> is that this method omits the * <code>subjectto</code> clause. **/ public StringBuffer shallow() { StringBuffer buffer = new StringBuffer(); buffer.append("inference "); name.write(buffer); buffer.append(" head "); head.write(buffer); buffer.append(" { "); for (int i = 0; i < headFinders.length; ++i) { headFinders[i].write(buffer); buffer.append(" "); } for (int i = 0; i < normalizerDeclarations.length; ++i) { normalizerDeclarations[i].write(buffer); buffer.append(" "); } if (algorithm != null) { buffer.append(" with "); algorithm.write(buffer); } buffer.append(" }"); return buffer; } /** * Returns an iterator used to successively access the children of this * node. * * @return An iterator used to successively access the children of this * node. **/ public ASTNodeIterator iterator() { // head + finders + normalizers + constraint, plus the algorithm when present int total = headFinders.length + normalizerDeclarations.length + 2; if (algorithm != null) ++total; ASTNodeIterator I = new ASTNodeIterator(total); I.children[0] = head; for (int i = 0; i < headFinders.length; ++i) I.children[i + 1] = headFinders[i]; for (int i = 0; i < normalizerDeclarations.length; ++i) I.children[i + 1 + headFinders.length] = normalizerDeclarations[i]; I.children[headFinders.length + normalizerDeclarations.length + 1] = constraint; if (algorithm != null) I.children[headFinders.length + normalizerDeclarations.length + 2] = algorithm; return I; } /** * Creates a new object with the same primitive data, and recursively * creates new member data objects as well. * * @return The clone node.
**/ public Object clone() { return new InferenceDeclaration( comment, -1, -1, (Name) name.clone(), (Argument) head.clone(), (HeadFinder[]) headFinders.clone(), (NormalizerDeclaration[]) normalizerDeclarations.clone(), (ConstraintDeclaration) constraint.clone(), algorithm == null ? null : (InstanceCreationExpression) algorithm.clone()); } /** * Ensures that the correct <code>run()</code> method is called for this * type of node. * * @param pass The pass whose <code>run()</code> method should be called. **/ public void runPass(Pass pass) { pass.run(this); } /** * Writes a string representation of this <code>ASTNode</code> to the * specified buffer. The representation written is parsable by the LBJ2 * compiler, but not very readable. * * @param buffer The buffer to write to. **/ public void write(StringBuffer buffer) { buffer.append("inference "); name.write(buffer); buffer.append(" head "); head.write(buffer); buffer.append(" { "); for (int i = 0; i < headFinders.length; ++i) { headFinders[i].write(buffer); buffer.append(" "); } for (int i = 0; i < normalizerDeclarations.length; ++i) { normalizerDeclarations[i].write(buffer); buffer.append(" "); } buffer.append("subjectto "); constraint.body.write(buffer); if (algorithm != null) { buffer.append(" with "); algorithm.write(buffer); } buffer.append(" }"); } /** * A head finder is a method that finds the head object for an inference * given another object. <code>HeadFinder</code> objects are only * constructed by the <code>InferenceDeclaration</code> constructor and * only stored in <code>InferenceDeclaration</code> objects. * * @author Nick Rizzolo **/ public static class HeadFinder extends ASTNode { /** (&not;&oslash;) Input specification of the head finder method. */ public Argument argument; /** (&not;&oslash;) Body of the head finder method. */ public Block body; /** * Full constructor. Line and byte offset information are taken from the * argument. * * @param a The argument to the head finder method. * @param b The body of the head finder method. **/ public HeadFinder(Argument a, Block b) { super(a.line, a.byteOffset); argument = a; body = b; } /** * Returns an iterator used to successively access the children of this * node. * * @return An iterator used to successively access the children of this * node. **/ public ASTNodeIterator iterator() { ASTNodeIterator I = new ASTNodeIterator(2); I.children[0] = argument; I.children[1] = body; return I; } /** * Creates a new object with the same primitive data, and recursively * creates new member data objects as well. * * @return The clone node. **/ public Object clone() { return new HeadFinder((Argument) argument.clone(), (Block) body.clone()); } /** * Ensures that the correct <code>run()</code> method is called for this * type of node. * * @param pass The pass whose <code>run()</code> method should be called. **/ public void runPass(Pass pass) { pass.run(this); } /** * Writes a string representation of this <code>ASTNode</code> to the * specified buffer. The representation written is parsable by the LBJ2 * compiler, but not very readable. * * @param buffer The buffer to write to. **/ public void write(StringBuffer buffer) { argument.write(buffer); buffer.append(" "); body.write(buffer); } } /** * A normalizer declaration is a clause of an inference declaration that * specifies a normalizer to be used in association with a particular * learning classifier or in general. 
<code>NormalizerDeclaration</code> * objects are only constructed by the <code>InferenceDeclaration</code> * constructor and only stored in <code>InferenceDeclaration</code> * objects. * * @author Nick Rizzolo **/ public static class NormalizerDeclaration extends ASTNode { /** (&oslash;) The name of the learner to be normalized. */ public Name learner; /** (&not;&oslash;) Constructs the normalizer to use. */ public InstanceCreationExpression normalizer; /** * Full constructor. * * @param line The line on which the source code represented by * this node is found. * @param byteOffset The byte offset from the beginning of the source * file at which the source code represented by this * node is found. * @param l The name of the learner. * @param n Constructs the normalizer. **/ public NormalizerDeclaration(int line, int byteOffset, Name l, InstanceCreationExpression n) { super(line, byteOffset); learner = l; normalizer = n; } /** * Parser's constructor. Line and byte offset information are taken from * the token. * * @param t The token containing line and byte offset information. * @param l The name of the learner. * @param n Constructs the normalizer. **/ public NormalizerDeclaration(TokenValue t, Name l, InstanceCreationExpression n) { super(t.line, t.byteOffset); learner = l; normalizer = n; } /** * Returns an iterator used to successively access the children of this * node. * * @return An iterator used to successively access the children of this * node. **/ public ASTNodeIterator iterator() { ASTNodeIterator I = new ASTNodeIterator(learner == null ? 1 : 2); if (learner != null) I.children[0] = learner; I.children[I.children.length - 1] = normalizer; return I; } /** * Creates a new object with the same primitive data, and recursively * creates new member data objects as well. * * @return The clone node. **/ public Object clone() { return new NormalizerDeclaration( -1, -1, learner == null ? null : (Name) learner.clone(), (InstanceCreationExpression) normalizer.clone()); } /** * Ensures that the correct <code>run()</code> method is called for this * type of node. * * @param pass The pass whose <code>run()</code> method should be called. **/ public void runPass(Pass pass) { pass.run(this); } /** * Writes a string representation of this <code>ASTNode</code> to the * specified buffer. The representation written is parsable by the LBJ2 * compiler, but not very readable. * * @param buffer The buffer to write to. **/ public void write(StringBuffer buffer) { if (learner != null) { learner.write(buffer); buffer.append(" "); } buffer.append("normalizedby "); normalizer.write(buffer); } } /** * An intermediate class used during parsing to represent the various * clauses of an inference declaration. * * @author Nick Rizzolo **/ public static class Clause { /** Value of the <code>type</code> variable. */ public static final int HEAD_FINDER = 0; /** Value of the <code>type</code> variable. */ public static final int SUBJECTTO = 1; /** Value of the <code>type</code> variable. */ public static final int WITH = 2; /** Value of the <code>type</code> variable. */ public static final int NORMALIZER_DECLARATION = 3; /** String representations of the type names. */ public static final String[] typeNames = new String[]{ "", "subjectto", "with", "" }; /** The type of the clause. */ public int type; /** The argument of the clause. */ public ASTNode argument; /** * Full constructor. * * @param t The type. * @param a The argument node. 
**/ public Clause(int t, ASTNode a) { type = t; argument = a; } /** * Creates a new object with the same primitive data, and recursively * creates new member data objects as well. * * @return The clone node. **/ public Object clone() { return new Clause(type, (ASTNode) argument.clone()); } /** * Debugging utility method. * * @return A textual representation of this expression. **/ public String toString() { if (type == HEAD_FINDER || type == NORMALIZER_DECLARATION) return argument.toString(); return typeNames[type] + " " + argument; } } }
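/*
 * Editor's sketch (not part of the LBJ2 sources): the InferenceDeclaration
 * constructor above routes a heterogeneous clause list by the integer type
 * tags defined in Clause. The minimal standalone program below mirrors that
 * single-pass dispatch so the pattern can be read in isolation; the tag
 * names match the real constants, while SimpleClause, the sample argument
 * strings, and this class itself are hypothetical illustration only.
 */
import java.util.ArrayList;
import java.util.List;

class ClauseDispatchSketch {
  static final int HEAD_FINDER = 0;
  static final int SUBJECTTO = 1;
  static final int WITH = 2;
  static final int NORMALIZER_DECLARATION = 3;

  /** Stand-in for LBJ2's Clause: a type tag plus an opaque argument. */
  static class SimpleClause {
    final int type;
    final String argument;
    SimpleClause(int type, String argument) { this.type = type; this.argument = argument; }
  }

  public static void main(String[] args) {
    List<SimpleClause> clauses = new ArrayList<SimpleClause>();
    clauses.add(new SimpleClause(HEAD_FINDER, "Word w"));
    clauses.add(new SimpleClause(SUBJECTTO, "{ ... }"));
    clauses.add(new SimpleClause(WITH, "new ILPInference()"));
    clauses.add(new SimpleClause(NORMALIZER_DECLARATION, "normalizedby Softmax"));

    List<String> finders = new ArrayList<String>();
    List<String> normalizers = new ArrayList<String>();
    String constraint = null;
    String algorithm = null;
    int subjecttoClauses = 0;

    // Single pass over the clause list, exactly like the constructor:
    // repeatable clauses accumulate into lists, unique clauses are
    // assigned and counted so later passes can reject duplicates.
    for (SimpleClause c : clauses) {
      if (c.type == HEAD_FINDER) finders.add(c.argument);
      else if (c.type == SUBJECTTO) { constraint = c.argument; ++subjecttoClauses; }
      else if (c.type == WITH) algorithm = c.argument;
      else if (c.type == NORMALIZER_DECLARATION) normalizers.add(c.argument);
    }

    System.out.println(finders.size() + " head finder(s), " + subjecttoClauses
        + " subjectto clause(s), constraint=" + constraint + ", with=" + algorithm
        + ", " + normalizers.size() + " normalizer(s)");
  }
}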
/* * Copyright (c) 2017, Tyler <https://github.com/tylerthardy> * Copyright (c) 2020, dekvall <https://github.com/dekvall> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package net.runelite.client.plugins.herbiboars; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.inject.Provides; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.inject.Inject; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.MenuAction; import static net.runelite.api.ObjectID.DRIFTWOOD_30523; import static net.runelite.api.ObjectID.MUSHROOM_30520; import static net.runelite.api.ObjectID.ROCK_30519; import static net.runelite.api.ObjectID.ROCK_30521; import static net.runelite.api.ObjectID.ROCK_30522; import net.runelite.api.TileObject; import net.runelite.api.Varbits; import net.runelite.api.coords.WorldPoint; import net.runelite.api.events.GameObjectChanged; import net.runelite.api.events.GameObjectDespawned; import net.runelite.api.events.GameObjectSpawned; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.GroundObjectChanged; import net.runelite.api.events.GroundObjectDespawned; import net.runelite.api.events.GroundObjectSpawned; import net.runelite.api.events.MenuOptionClicked; import net.runelite.api.events.VarbitChanged; import net.runelite.client.callback.ClientThread; import net.runelite.client.config.ConfigManager; import net.runelite.client.eventbus.Subscribe; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.ui.overlay.OverlayManager; import net.runelite.client.util.Text; import org.apache.commons.lang3.ArrayUtils; @PluginDescriptor( name = "Herbiboar", description = "Highlight starting rocks, trails, and the objects to search at the end of each trail", tags = {"herblore", "hunter", "skilling", "overlay"} ) @Slf4j @Getter public class HerbiboarPlugin extends Plugin { private static final List<WorldPoint> END_LOCATIONS = ImmutableList.of( new WorldPoint(3693, 3798, 0), new WorldPoint(3702, 3808, 0), new 
WorldPoint(3703, 3826, 0), new WorldPoint(3710, 3881, 0), new WorldPoint(3700, 3877, 0), new WorldPoint(3715, 3840, 0), new WorldPoint(3751, 3849, 0), new WorldPoint(3685, 3869, 0), new WorldPoint(3681, 3863, 0) ); private static final Set<Integer> START_OBJECT_IDS = ImmutableSet.of( ROCK_30519, MUSHROOM_30520, ROCK_30521, ROCK_30522, DRIFTWOOD_30523 ); private static final List<Integer> HERBIBOAR_REGIONS = ImmutableList.of( 14652, 14651, 14908, 14907 ); @Inject private Client client; @Inject private ClientThread clientThread; @Inject private OverlayManager overlayManager; @Inject private HerbiboarOverlay overlay; @Inject private HerbiboarMinimapOverlay minimapOverlay; /** * Objects which appear at the beginning of Herbiboar hunting trails */ private final Map<WorldPoint, TileObject> starts = new HashMap<>(); /** * Herbiboar hunting "footstep" trail objects */ private final Map<WorldPoint, TileObject> trails = new HashMap<>(); /** * Objects which trigger next trail (mushrooms, mud, seaweed, etc) */ private final Map<WorldPoint, TileObject> trailObjects = new HashMap<>(); /** * Tunnel where the Herbiboar is hiding at the end of a trail */ private final Map<WorldPoint, TileObject> tunnels = new HashMap<>(); /** * Trail object IDs which should be highlighted */ private final Set<Integer> shownTrails = new HashSet<>(); /** * Sequence of herbiboar spots searched along the current trail */ private final List<HerbiboarSearchSpot> currentPath = Lists.newArrayList(); private boolean inHerbiboarArea; private TrailToSpot nextTrail; private HerbiboarSearchSpot.Group currentGroup; private int finishId; private boolean started; private WorldPoint startPoint; private HerbiboarStart startSpot; private boolean ruleApplicable; @Provides HerbiboarConfig provideConfig(ConfigManager configManager) { return configManager.getConfig(HerbiboarConfig.class); } @Override protected void startUp() throws Exception { overlayManager.add(overlay); overlayManager.add(minimapOverlay); if (client.getGameState() == GameState.LOGGED_IN) { clientThread.invokeLater(() -> { inHerbiboarArea = checkArea(); updateTrailData(); }); } } @Override protected void shutDown() throws Exception { overlayManager.remove(overlay); overlayManager.remove(minimapOverlay); resetTrailData(); clearCache(); inHerbiboarArea = false; } private void updateTrailData() { if (!isInHerbiboarArea()) { return; } boolean pathActive = false; boolean wasStarted = started; // Get trail data for (HerbiboarSearchSpot spot : HerbiboarSearchSpot.values()) { for (TrailToSpot trail : spot.getTrails()) { int value = client.getVar(trail.getVarbit()); if (value == trail.getValue()) { // The trail after you have searched the spot currentGroup = spot.getGroup(); nextTrail = trail; // You never visit the same spot twice if (!currentPath.contains(spot)) { currentPath.add(spot); } } else if (value > 0) { // The current trail shownTrails.addAll(trail.getFootprintIds()); pathActive = true; } } } finishId = client.getVar(Varbits.HB_FINISH); // The started varbit doesn't get set until the first spot of the rotation has been searched // so we need to use the current group as an indicator of the rotation being started started = client.getVar(Varbits.HB_STARTED) > 0 || currentGroup != null; boolean finished = !pathActive && started; if (!wasStarted && started) { startSpot = HerbiboarStart.from(startPoint); } ruleApplicable = HerbiboarRule.canApplyRule(startSpot, currentPath); if (finished) { resetTrailData(); } } @Subscribe public void onMenuOptionClicked(MenuOptionClicked menuOpt) { 
if (!inHerbiboarArea || started || MenuAction.GAME_OBJECT_FIRST_OPTION != menuOpt.getMenuAction()) { return; } switch (Text.removeTags(menuOpt.getMenuTarget())) { case "Rock": case "Mushroom": case "Driftwood": startPoint = WorldPoint.fromScene(client, menuOpt.getParam0(), menuOpt.getParam1(), client.getPlane()); } } private void resetTrailData() { log.debug("Reset trail data"); shownTrails.clear(); currentPath.clear(); nextTrail = null; currentGroup = null; finishId = 0; started = false; startPoint = null; startSpot = null; ruleApplicable = false; } private void clearCache() { starts.clear(); trails.clear(); trailObjects.clear(); tunnels.clear(); } @Subscribe public void onGameStateChanged(GameStateChanged event) { switch (event.getGameState()) { case HOPPING: case LOGGING_IN: resetTrailData(); break; case LOADING: clearCache(); inHerbiboarArea = checkArea(); break; default: break; } } @Subscribe public void onVarbitChanged(VarbitChanged event) { updateTrailData(); } @Subscribe public void onGameObjectSpawned(GameObjectSpawned event) { onTileObject(null, event.getGameObject()); } @Subscribe public void onGameObjectChanged(GameObjectChanged event) { onTileObject(event.getPrevious(), event.getGameObject()); } @Subscribe public void onGameObjectDespawned(GameObjectDespawned event) { onTileObject(event.getGameObject(), null); } @Subscribe public void onGroundObjectSpawned(GroundObjectSpawned event) { onTileObject(null, event.getGroundObject()); } @Subscribe public void onGroundObjectChanged(GroundObjectChanged event) { onTileObject(event.getPrevious(), event.getGroundObject()); } @Subscribe public void onGroundObjectDespawned(GroundObjectDespawned event) { onTileObject(event.getGroundObject(), null); } // Store relevant GameObjects (starts, tracks on trails, objects used to trigger next trails, and tunnels) private void onTileObject(TileObject oldObject, TileObject newObject) { if (oldObject != null) { WorldPoint oldLocation = oldObject.getWorldLocation(); starts.remove(oldLocation); trails.remove(oldLocation); trailObjects.remove(oldLocation); tunnels.remove(oldLocation); } if (newObject == null) { return; } // Starts if (START_OBJECT_IDS.contains(newObject.getId())) { starts.put(newObject.getWorldLocation(), newObject); return; } // Trails if (HerbiboarSearchSpot.isTrail(newObject.getId())) { trails.put(newObject.getWorldLocation(), newObject); return; } // GameObject to trigger next trail (mushrooms, mud, seaweed, etc) if (HerbiboarSearchSpot.isSearchSpot(newObject.getWorldLocation())) { trailObjects.put(newObject.getWorldLocation(), newObject); return; } // Herbiboar tunnel if (END_LOCATIONS.contains(newObject.getWorldLocation())) { tunnels.put(newObject.getWorldLocation(), newObject); } } private boolean checkArea() { final int[] mapRegions = client.getMapRegions(); for (int region : HERBIBOAR_REGIONS) { if (ArrayUtils.contains(mapRegions, region)) { return true; } } return false; } List<WorldPoint> getEndLocations() { return END_LOCATIONS; } }
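/*
 * Editor's sketch (not part of the plugin): HerbiboarPlugin#checkArea()
 * above decides area membership by intersecting the client's loaded map
 * regions with a fixed whitelist. This dependency-free program shows the
 * same containment test in isolation; the region IDs are the plugin's real
 * HERBIBOAR_REGIONS values, while the class name and sample inputs are
 * hypothetical.
 */
import java.util.Arrays;

class RegionCheckSketch {
  private static final int[] HERBIBOAR_REGIONS = {14652, 14651, 14908, 14907};

  // Mirrors the ArrayUtils.contains loop: true if any currently loaded
  // map region is one of the herbiboar hunting regions.
  static boolean inHerbiboarArea(final int[] loadedMapRegions) {
    for (final int region : loadedMapRegions) {
      if (Arrays.stream(HERBIBOAR_REGIONS).anyMatch(r -> r == region)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    System.out.println(inHerbiboarArea(new int[] {14651, 14652})); // true
    System.out.println(inHerbiboarArea(new int[] {12850}));        // false
  }
}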
/* * Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* @test @summary test Resource Bundle for bug 4168625 @build Bug4168625Class Bug4168625Getter Bug4168625Resource Bug4168625Resource3 Bug4168625Resource3_en Bug4168625Resource3_en_CA Bug4168625Resource3_en_IE Bug4168625Resource3_en_US Bug4168625Resource2_en_US Bug4168625Resource2 @run main/timeout=600 Bug4168625Test @bug 4168625 6993339 */ /* * * * (C) Copyright IBM Corp. 1999 - All Rights Reserved * * The original version of this source code and documentation is * copyrighted and owned by IBM. These materials are provided * under terms of a License Agreement between IBM and Sun. * This technology is protected by multiple US and International * patents. This notice and attribution to IBM may not be removed. * */ import java.util.*; import java.io.*; /** * This test tries to correct two efficiency problems with the caching * mechanism of ResourceBundle. It also allows concurrent loads * of resource bundles to be performed if the bundles are unrelated (ex. a * load of a local system resource by one thread while another thread is * doing a slow load over a network). */ public class Bug4168625Test extends RBTestFmwk { public static void main(String[] args) throws Exception { new Bug4168625Test().run(args); } /** * Verify that getBundle will do something reasonable when part of the * resource hierarchy is missing. */ public void testMissingParent() throws Exception { final Locale oldDefault = Locale.getDefault(); Locale.setDefault(new Locale("en", "US")); try { final Locale loc = new Locale("jf", "jf"); ResourceBundle bundle = ResourceBundle.getBundle("Bug4168625Resource2", loc); final String s1 = bundle.getString("name"); if (!s1.equals("Bug4168625Resource2_en_US")) { errln("getBundle did not find leaf bundle: "+bundle.getClass().getName()); } final String s2 = bundle.getString("baseName"); if (!s2.equals("Bug4168625Resource2")) { errln("getBundle did not set up proper inheritance chain"); } } finally { Locale.setDefault(oldDefault); } } /** * Previous versions of ResourceBundle have had the following * caching behavior. Assume the classes * Bug4168625Resource_fr_FR, Bug4168625Resource_fr, * Bug4168625Resource_en_US, and Bug4168625Resource_en don't * exist. The class Bug4168625Resource does. Assume the default * locale is en_US. 
* <P> * <pre> * getBundle("Bug4168625Resource", new Locale("fr", "FR")); * -->try to load Bug4168625Resource_fr_FR * -->try to load Bug4168625Resource_fr * -->try to load Bug4168625Resource_en_US * -->try to load Bug4168625Resource_en * -->load Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource_en * -->cache Bug4168625Resource as Bug4168625Resource_en_US * -->return Bug4168625Resource * getBundle("Bug4168625Resource", new Locale("fr", "FR")); * -->try to load Bug4168625Resource_fr_FR * -->try to load Bug4168625Resource_fr * -->find cached Bug4168625Resource_en_US * -->return Bug4168625Resource_en_US (which is realy Bug4168625Resource) * </pre> * <P> * The second call causes two loads for Bug4168625Resource_fr_FR and * Bug4168625Resource_en which have already been tried and failed. These * two loads should have been cached as Bug4168625Resource by the first * call. * * The following, more efficient behavior is desired: * <P> * <pre> * getBundle("Bug4168625Resource", new Locale("fr", "FR")); * -->try to load Bug4168625Resource_fr_FR * -->try to load Bug4168625Resource_fr * -->try to load Bug4168625Resource_en_US * -->try to load Bug4168625Resource_en * -->load Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource_en * -->cache Bug4168625Resource as Bug4168625Resource_en_US * -->cache Bug4168625Resource as Bug4168625Resource_fr * -->cache Bug4168625Resource as Bug4168625Resource_fr_FR * -->return Bug4168625Resource * getBundle("Bug4168625Resource", new Locale("fr", "FR")); * -->find cached Bug4168625Resource_fr_FR * -->return Bug4168625Resource_en_US (which is realy Bug4168625Resource) * </pre> * <P> * */ public void testCacheFailures() throws Exception { checkResourceLoading("Bug4168625Resource", new Locale("fr", "FR")); } /** * Previous versions of ResourceBundle have had the following * caching behavior. Assume the current locale is locale is en_US. * The classes Bug4168625Resource_en_US, and Bug4168625Resource_en don't * exist. The class Bug4168625Resource does. * <P> * <pre> * getBundle("Bug4168625Resource", new Locale("en", "US")); * -->try to load Bug4168625Resource_en_US * -->try to load Bug4168625Resource_en * -->try to load Bug4168625Resource_en_US * -->try to load Bug4168625Resource_en * -->load Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource_en * -->cache Bug4168625Resource as Bug4168625Resource_en_US * -->return Bug4168625Resource * </pre> * <P> * The redundant loads of Bug4168625Resource_en_US and Bug4168625Resource_en * should not occur. 
The desired behavior is as follows: * <P> * <pre> * getBundle("Bug4168625Resource", new Locale("en", "US")); * -->try to load Bug4168625Resource_en_US * -->try to load Bug4168625Resource_en * -->load Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource * -->cache Bug4168625Resource as Bug4168625Resource_en * -->cache Bug4168625Resource as Bug4168625Resource_en_US * -->return Bug4168625Resource * </pre> * <P> */ public void testRedundantLoads() throws Exception { checkResourceLoading("Bug4168625Resource", Locale.getDefault()); } /** * Ensure that resources are only loaded once and are cached correctly */ private void checkResourceLoading(String resName, Locale l) throws Exception { final Loader loader = new Loader( new String[] { "Bug4168625Class" }, new String[] { "Bug4168625Resource3_en_US", "Bug4168625Resource3_en_CA" }); final Class c = loader.loadClass("Bug4168625Class"); Bug4168625Getter test = (Bug4168625Getter)c.newInstance(); final String resClassName; if (l.toString().length() > 0) { resClassName = resName+"_"+l; } else { resClassName = resName; } Object bundle = test.getResourceBundle(resName, l); loader.logClasses("Initial lookup of "+resClassName+" generated the following loads:"); final Vector lastLoad = new Vector(loader.loadedClasses.size()); boolean dups = false; for (int i = loader.loadedClasses.size() - 1; i >= 0 ; i--) { final Object item = loader.loadedClasses.elementAt(i); loader.loadedClasses.removeElementAt(i); if (loader.loadedClasses.contains(item)) { logln("Resource loaded more than once: "+item); dups = true; } else { lastLoad.addElement(item); } } if (dups) { errln("ResourceBundle loaded some classes multiple times"); } loader.loadedClasses.removeAllElements(); bundle = test.getResourceBundle(resName, l); loader.logClasses("Second lookup of "+resClassName+" generated the following loads:"); dups = false; for (int i = 0; i < loader.loadedClasses.size(); i++) { Object item = loader.loadedClasses.elementAt(i); if (lastLoad.contains(item)) { logln("ResourceBundle did not cache "+item+" correctly"); dups = true; } } if (dups) { errln("Resource bundle not caching some classes properly"); } } private class ConcurrentLoadingThread extends Thread { private Loader loader; public Object bundle; private Bug4168625Getter test; private Locale locale; private String resourceName = "Bug4168625Resource3"; public ConcurrentLoadingThread(Loader loader, Bug4168625Getter test, Locale l, String resourceName) { this.loader = loader; this.test = test; this.locale = l; this.resourceName = resourceName; } public ConcurrentLoadingThread(Loader loader, Bug4168625Getter test, Locale l) { this.loader = loader; this.test = test; this.locale = l; } public void run() { try { logln(">>"+threadName()+">run"); bundle = test.getResourceBundle(resourceName, locale); } catch (Exception e) { errln("TEST CAUGHT UNEXPECTED EXCEPTION: "+e); } finally { logln("<<"+threadName()+"<run"); } } public synchronized void waitUntilPinged() { logln(">>"+threadName()+">waitUntilPinged"); loader.notifyEveryone(); try { wait(30000); //wait 30 seconds max. } catch (InterruptedException e) { logln("Test deadlocked."); } logln("<<"+threadName()+"<waitUntilPinged"); } public synchronized void ping() { logln(">>"+threadName()+">ping "+threadName(this)); notifyAll(); logln("<<"+threadName()+"<ping "+threadName(this)); } }; /** * This test ensures that multiple resources can be loading at the same * time as long as they don't depend on each other in some way. 
*/ public void testConcurrentLoading() throws Exception { final Loader loader = new Loader( new String[] { "Bug4168625Class" }, new String[] { "Bug4168625Resource3_en_US", "Bug4168625Resource3_en_CA" }); final Class c = loader.loadClass("Bug4168625Class"); final Bug4168625Getter test = (Bug4168625Getter)c.newInstance(); ConcurrentLoadingThread thread1 = new ConcurrentLoadingThread(loader, test, new Locale("en", "CA")); ConcurrentLoadingThread thread2 = new ConcurrentLoadingThread(loader, test, new Locale("en", "IE")); thread1.start(); //start thread 1 loader.waitForNotify(1); //wait for thread1 to do getBundle & block in loader thread2.start(); //start second thread thread2.join(); //wait until thread2 terminates. //Thread1 should be blocked inside getBundle at the class loader //Thread2 should have completed its getBundle call and terminated if (!thread1.isAlive() || thread2.isAlive()) { errln("ResourceBundle.getBundle not allowing legal concurrent loads"); } thread1.ping(); //continue thread1 thread1.join(); } /** * This test ensures that a resource loads correctly (with all its parents) * when memory is very low (ex. the cache gets purged during a load). */ public void testLowMemoryLoad() throws Exception { final String[] classToLoad = { "Bug4168625Class" }; final String[] classToWait = { "Bug4168625Resource3_en_US","Bug4168625Resource3_en","Bug4168625Resource3" }; final Loader loader = new Loader(classToLoad, classToWait); final Class c = loader.loadClass("Bug4168625Class"); final Bug4168625Getter test = (Bug4168625Getter)c.newInstance(); causeResourceBundleCacheFlush(); ConcurrentLoadingThread thread1 = new ConcurrentLoadingThread(loader, test, new Locale("en", "US")); thread1.start(); //start thread 1 loader.waitForNotify(1); //wait for thread1 to do getBundle(en_US) & block in loader causeResourceBundleCacheFlush(); //cause a cache flush thread1.ping(); //kick thread 1 loader.waitForNotify(2); //wait for thread1 to do getBundle(en) & block in loader causeResourceBundleCacheFlush(); //cause a cache flush thread1.ping(); //kick thread 1 loader.waitForNotify(3); //wait for thread1 to do getBundle(en) & block in loader causeResourceBundleCacheFlush(); //cause a cache flush thread1.ping(); //kick thread 1 thread1.join(); //wait until thread1 terminates ResourceBundle bundle = (ResourceBundle)thread1.bundle; String s1 = bundle.getString("Bug4168625Resource3_en_US"); String s2 = bundle.getString("Bug4168625Resource3_en"); String s3 = bundle.getString("Bug4168625Resource3"); if ((s1 == null) || (s2 == null) || (s3 == null)) { errln("Bundle not constructed correctly. The parent chain is incorrect."); } } /** * A simple class loader that loads classes from the current * working directory. The loader will block the current thread * of execution before it returns when it tries to load * the class "Bug4168625Resource3_en_US". 
*/ private static final String CLASS_PREFIX = ""; private static final String CLASS_SUFFIX = ".class"; private static final class SimpleLoader extends ClassLoader { private boolean network = false; public SimpleLoader() { super(SimpleLoader.class.getClassLoader()); this.network = false; } public SimpleLoader(boolean simulateNetworkLoad) { super(SimpleLoader.class.getClassLoader()); this.network = simulateNetworkLoad; } public Class loadClass(final String className, final boolean resolveIt) throws ClassNotFoundException { Class result; synchronized (this) { result = findLoadedClass(className); if (result == null) { if (network) { try { Thread.sleep(100); } catch (java.lang.InterruptedException e) { } } result = getParent().loadClass(className); if ((result != null) && resolveIt) { resolveClass(result); } } } return result; } } private final class Loader extends ClassLoader { public final Vector loadedClasses = new Vector(); private String[] classesToLoad; private String[] classesToWaitFor; public Loader() { super(Loader.class.getClassLoader()); classesToLoad = new String[0]; classesToWaitFor = new String[0]; } public Loader(final String[] classesToLoadIn, final String[] classesToWaitForIn) { super(Loader.class.getClassLoader()); classesToLoad = classesToLoadIn; classesToWaitFor = classesToWaitForIn; } /** * Load a class. Files we can load take preference over ones the system * can load. */ private byte[] getClassData(final String className) { boolean shouldLoad = false; for (int i = classesToLoad.length-1; i >= 0; --i) { if (className.equals(classesToLoad[i])) { shouldLoad = true; break; } } if (shouldLoad) { final String name = CLASS_PREFIX+className+CLASS_SUFFIX; try { final InputStream fi = this.getClass().getClassLoader().getResourceAsStream(name); final byte[] result = new byte[fi.available()]; fi.read(result); return result; } catch (Exception e) { logln("Error loading test class: "+name); logln(e.toString()); return null; } } else { return null; } } /** * Load a class. Files we can load take preference over ones the system * can load. */ public Class loadClass(final String className, final boolean resolveIt) throws ClassNotFoundException { Class result; synchronized (this) { logln(">>"+threadName()+">load "+className); loadedClasses.addElement(className); result = findLoadedClass(className); if (result == null) { final byte[] classData = getClassData(className); if (classData == null) { //we don't have a local copy of this one logln("Loading system class: "+className); result = loadFromSystem(className); } else { result = defineClass(classData, 0, classData.length); if (result == null) { //there was an error defining the class result = loadFromSystem(className); } } if ((result != null) && resolveIt) { resolveClass(result); } } } for (int i = classesToWaitFor.length-1; i >= 0; --i) { if (className.equals(classesToWaitFor[i])) { rendezvous(); break; } } logln("<<"+threadName()+"<load "+className); return result; } /** * Delegate loading to its parent class loader that loads the test classes. * In othervm mode, the parent class loader is the system class loader; * in samevm mode, the parent class loader is the jtreg URLClassLoader. 
*/ private Class loadFromSystem(String className) throws ClassNotFoundException { return getParent().loadClass(className); } public void logClasses(String title) { logln(title); for (int i = 0; i < loadedClasses.size(); i++) { logln(" "+loadedClasses.elementAt(i)); } logln(""); } public int notifyCount = 0; public int waitForNotify(int count) { return waitForNotify(count, 0); } public synchronized int waitForNotify(int count, long time) { logln(">>"+threadName()+">waitForNotify"); if (count > notifyCount) { try { wait(time); } catch (InterruptedException e) { } } else { logln(" count("+count+") > notifyCount("+notifyCount+")"); } logln("<<"+threadName()+"<waitForNotify"); return notifyCount; } private synchronized void notifyEveryone() { logln(">>"+threadName()+">notifyEveryone"); notifyCount++; notifyAll(); logln("<<"+threadName()+"<notifyEveryone"); } private void rendezvous() { final Thread current = Thread.currentThread(); if (current instanceof ConcurrentLoadingThread) { ((ConcurrentLoadingThread)current).waitUntilPinged(); } } } private static String threadName() { return threadName(Thread.currentThread()); } private static String threadName(Thread t) { String temp = t.toString(); int ndx = temp.indexOf("Thread["); temp = temp.substring(ndx + "Thread[".length()); ndx = temp.indexOf(','); temp = temp.substring(0, ndx); return temp; } /** Fill memory to force all SoftReferences to be GCed */ private void causeResourceBundleCacheFlush() { logln("Filling memory..."); int allocationSize = 1024; Vector memoryHog = new Vector(); try { while (true) { memoryHog.addElement(new byte[allocationSize]); allocationSize *= 2; } } catch (Throwable e) { logln("Caught "+e+" filling memory"); } finally { memoryHog = null; System.gc(); } logln("last allocation size: " + allocationSize); } /** * NOTE: this problem is not externally testable and can only be * verified through code inspection unless special code to force * a task switch is inserted into ResourceBundle. * The class Bug4168625Resource_sp exists. Its parent bundle * (Bug4168625Resource) contains a resource string with the tag * "language" but Bug4168625Resource_sp does not. * Assume two threads, ThreadA and ThreadB, are executing, and they both * load the resource Bug4168625Resource for the sp locale. * ResourceBundle.getBundle adds a bundle to the bundle cache (in * findBundle) before it sets the bundle's parent (in getBundle after * returning from findBundle).
* <P> * <pre> * ThreadA.getBundle("Bug4168625Resource", new Locale("sp")); * A-->load Bug4168625Resource_sp * A-->find cached Bug4168625Resource * A-->cache Bug4168625Resource_sp as Bug4168625Resource_sp * ThreadB.getBundle("Bug4168625Resource", new Locale("sp")); * B-->find cached Bug4168625Resource_sp * B-->return Bug4168625Resource_sp * ThreadB.bundle.getString("language"); * B-->try to find "language" in Bug4168625Resource_sp * B-->Bug4168625Resource_sp does not have a parent, so return null; * ThreadB.System.out.println("Some unknown country"); * A-->set parent of Bug4168625Resource_sp to Bug4168625Resource * A-->return Bug4168625Resource_sp (the same bundle ThreadB got) * ThreadA.bundle.getString("language"); * A-->try to find "language" in Bug4168625Resource_sp * A-->try to find "language" in Bug4168625Resource (parent of Bug4168625Resource_sp) * A-->return the string * ThreadA.System.out.println("Language = "+language); * ThreadB.bundle.getString("language"); * B-->try to find "language" in Bug4168625Resource_sp * B-->try to find "language" in Bug4168625Resource (parent of Bug4168625Resource_sp) * B-->return the string * ThreadB.System.out.println("Language = "+language); * </pre> * <P> * Note that the first call to getString() by ThreadB returns null, but the second * returns a value. Thus to ThreadB, the bundle appears to change. ThreadA gets * the expected results right away. */ }
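/*
 * Editor's sketch (not part of the regression test): a tiny standalone
 * illustration of the caching contract the test above probes. To stay
 * self-contained, the bundle is supplied programmatically through a
 * ResourceBundle.Control whose newBundle only yields a root bundle; the
 * base name "Demo", the load counter, and this class are hypothetical.
 * With a warm cache, the second getBundle call should return the same
 * instance without invoking newBundle again.
 */
import java.io.IOException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Locale;
import java.util.ResourceBundle;

class BundleCacheSketch {
  // Counts how many times a bundle is actually constructed.
  static int loads = 0;

  static final ResourceBundle.Control CONTROL = new ResourceBundle.Control() {
    @Override
    public ResourceBundle newBundle(String baseName, Locale locale, String format,
                                    ClassLoader loader, boolean reload) throws IOException {
      // Fail every candidate locale except the root, so lookup falls
      // through the whole candidate chain to a single base bundle.
      if (!locale.equals(Locale.ROOT)) {
        return null;
      }
      loads++;
      return new ResourceBundle() {
        @Override protected Object handleGetObject(String key) {
          return "name".equals(key) ? "base" : null;
        }
        @Override public Enumeration<String> getKeys() {
          return Collections.enumeration(Collections.singleton("name"));
        }
      };
    }
  };

  public static void main(String[] args) {
    Locale fr = new Locale("fr", "FR");
    ResourceBundle a = ResourceBundle.getBundle("Demo", fr, CONTROL);
    ResourceBundle b = ResourceBundle.getBundle("Demo", fr, CONTROL);
    // Expected: the second lookup is served from the cache, so the two
    // references are identical and the bundle was constructed only once.
    System.out.println((a == b) + " loads=" + loads);
  }
}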
package net.ssehub.easy.instantiation.core.model.vilTypes; import java.io.File; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import de.uni_hildesheim.sse.ModelUtility; import net.ssehub.easy.basics.modelManagement.ModelInfo; import net.ssehub.easy.basics.modelManagement.ModelManagementException; import net.ssehub.easy.basics.progress.ProgressObserver; import net.ssehub.easy.dslCore.StandaloneInitializer; import net.ssehub.easy.instantiation.core.model.AbstractTest; import net.ssehub.easy.instantiation.core.model.common.VilException; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.AbstractIvmlVariable; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.Configuration; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.DecisionVariable; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.IvmlElement; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.NoVariableFilter; import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.Utils; import net.ssehub.easy.varModel.confModel.AssignmentState; import net.ssehub.easy.varModel.confModel.IDecisionVariable; import net.ssehub.easy.varModel.management.VarModel; import net.ssehub.easy.varModel.model.AbstractVariable; import net.ssehub.easy.varModel.model.ContainableModelElement; import net.ssehub.easy.varModel.model.Project; import net.ssehub.easy.varModel.model.ProjectImport; import net.ssehub.easy.varModel.model.datatypes.Compound; import net.ssehub.easy.varModel.model.datatypes.IDatatype; import net.ssehub.easy.varModel.model.datatypes.Reference; import net.ssehub.easy.varModel.varModel.testSupport.DefaultConfiguration; /** * Performs tests on the configuration wrapper. * * @author Holger Eichelberger */ public class ConfigurationTests extends AbstractTest { protected static final ProgressObserver OBSERVER = ProgressObserver.NO_OBSERVER; /** * Derives the sub-test dir. * * @return the sub-test dir */ private static final File cfgTestDataDir() { return new File(determineTestDataDir(), "ivml"); } /** * Starts up the test. */ @BeforeClass public static void startUp() { ModelUtility.setResourceInitializer(new StandaloneInitializer()); AbstractTest.startUp(); try { VarModel.INSTANCE.loaders().registerLoader(ModelUtility.INSTANCE, OBSERVER); VarModel.INSTANCE.locations().addLocation(cfgTestDataDir(), OBSERVER); } catch (ModelManagementException e) { e.printStackTrace(System.err); Assert.assertTrue(false); // shall not happen } } /** * Tears down the test. */ @AfterClass public static void shutDown() { try { VarModel.INSTANCE.locations().removeLocation(cfgTestDataDir(), OBSERVER); } catch (ModelManagementException e) { e.printStackTrace(System.err); Assert.assertTrue(false); // shall not happen } } /** * Tests the configuration wrapper. 
*/ @Test public void testConfiguration() { net.ssehub.easy.varModel.confModel.Configuration cfg = DefaultConfiguration.createDefaultConfiguration(); Assert.assertNotNull("creating default IVML configuration failed", cfg); Configuration configuration = new Configuration(cfg); Assert.assertNotNull(configuration); Project project = cfg.getProject(); Assert.assertNotNull(project); Assert.assertEquals(configuration.getName(), cfg.getName()); Assert.assertEquals(configuration.getQualifiedName(), project.getQualifiedName()); Assert.assertEquals(configuration.getTypeName(), project.getType().getName()); Assert.assertEquals(configuration.getQualifiedType(), project.getType().getQualifiedName()); TestVisitor visitor = new TestVisitor(cfg, configuration); project.accept(visitor); try { compare(configuration.selectByName("pInt"), cfg, new NameSelector("pInt")); compare(configuration.selectByName("p.*"), cfg, new NamePatternSelector("p.*")); compare(configuration.selectByName("sse.*"), cfg, new NamePatternSelector("sse.*")); compare(configuration.selectByType("Boolean"), cfg, new TypeSelector("Boolean")); compare(configuration.selectByType("Int.*"), cfg, new TypePatternSelector("Int.*")); compare(configuration.selectByType("SSE.*"), cfg, new TypePatternSelector("SSE.*")); compare(configuration.selectByAttribute("bindingTime"), cfg, new AttributeSelector("bindingTime")); compare(configuration.selectByAttribute("bind.*"), cfg, new AttributePatternSelector("bind.*")); compare(configuration.selectByAttribute("bla.*"), cfg, new AttributePatternSelector("bla.*")); // emulate external call IvmlElement bindingTime = Utils.getAttributeDeclaration(configuration, "noAttribute"); Assert.assertNull(bindingTime); // just a x-check bindingTime = Utils.getAttributeDeclaration(configuration, "bindingTime"); Assert.assertNotNull(bindingTime); compare(configuration.selectByAttribute(bindingTime.getQualifiedName(), 1), cfg, new AttributeSelector("bindingTime", 1)); compare(configuration.selectByAttribute(bindingTime.getQualifiedName(), 100), cfg, new AttributeSelector("bindingTime", 100)); Assert.assertEquals(configuration.selectByAttribute( bindingTime.getQualifiedName(), null).variables().size(), 0); // the case that the element was not resolved Assert.assertEquals(configuration.selectByAttribute(null, 100).variables().size(), 0); } catch (VilException e) { Assert.fail("unexpected exception " + e.getMessage()); } } /** * Generically tests the variables in <code>configuration</code> against selected variables in <code>cfg</code>. * * @param configuration the configuration to be tested * @param cfg an IVML configuration providing the actual data * @param selector the selector determining the variables to be considered */ private void compare(Configuration configuration, net.ssehub.easy.varModel.confModel.Configuration cfg, IVariableSelector selector) { Map<String, IDecisionVariable> reference = new HashMap<String, IDecisionVariable>(); put(cfg.getProject(), reference, cfg, selector); Iterator<DecisionVariable> iter = configuration.variables().iterator(); while (iter.hasNext()) { DecisionVariable var = iter.next(); IDecisionVariable decVar = reference.get(var.getQualifiedName()); if (null != decVar) { reference.remove(var.getQualifiedName()); } } Assert.assertTrue("reference not empty: " + reference, reference.isEmpty()); } /** * Puts the variables of project into <code>data</code> if they comply with <code>selector</code>. 
* * @param project the project to be iterated over * @param data the data to be modified as a side effect (fqn-instance mapping) * @param cfg the underlying IVML configuration * @param selector the selector instance */ private void put(Project project, Map<String, IDecisionVariable> data, net.ssehub.easy.varModel.confModel.Configuration cfg, IVariableSelector selector) { for (int i = 0; i < project.getImportsCount(); i++) { ProjectImport imp = project.getImport(i); if (null != imp.getResolved()) { put(imp.getResolved(), data, cfg, selector); } } for (int e = 0; e < project.getElementCount(); e++) { ContainableModelElement elt = project.getElement(e); if (elt instanceof AbstractVariable) { IDecisionVariable var = cfg.getDecision((AbstractVariable) elt); if (null != var && AssignmentState.FROZEN == var.getState() && selector.select(var)) { data.put(var.getDeclaration().getQualifiedName(), var); } } } } /** * Returns a testing IVML configuration instance for the given <code>modelName</code>. * * @param modelName the model name * @return the configuration */ private static net.ssehub.easy.varModel.confModel.Configuration getConfiguration(String modelName) { net.ssehub.easy.varModel.confModel.Configuration config = null; List<ModelInfo<net.ssehub.easy.varModel.model.Project>> info = VarModel.INSTANCE.availableModels().getModelInfo(modelName); Assert.assertTrue(info.size() > 0); try { config = new net.ssehub.easy.varModel.confModel.Configuration(VarModel.INSTANCE.load(info.get(0))); } catch (ModelManagementException e) { Assert.fail(e.getMessage()); } return config; } /** * Creates a runtime VIL configuration wrapper. * * @param cfg the configuration to test * @return the configuration wrapper instance */ private static Configuration getRuntimeConfiguration(net.ssehub.easy.varModel.confModel.Configuration cfg) { return new Configuration(cfg, NoVariableFilter.INSTANCE); } /** * Asserts an empty change configuration on a non-runtime VIL configuration wrapper. * * @param cfg the configuration to test */ private static void assertChangeEmpty(net.ssehub.easy.varModel.confModel.Configuration cfg) { assertChangeEmpty(new Configuration(cfg)); } /** * Asserts an empty change configuration. * * @param cfg the configuration to test */ private static void assertChangeEmpty(Configuration cfg) { Assert.assertEquals(0, cfg.selectChanged().variables().size()); Assert.assertEquals(0, cfg.selectChangedWithContext().variables().size()); } /** * Tests a simple changed configuration. */ @Test public void testSelectChanged1unchanged() { net.ssehub.easy.varModel.confModel.Configuration ivml = getConfiguration("context1"); assertChangeEmpty(ivml); // non-rt cfg has empty changes Configuration baseCfg = getRuntimeConfiguration(ivml); DecisionVariable var = baseCfg.getByName("i2"); var.setValue(var.getValue()); // effectively the same value - shall not cause a change assertChangeEmpty(baseCfg); } /** * Tests a simple changed configuration. 
*/ @Test public void testSelectChanged1() { net.ssehub.easy.varModel.confModel.Configuration ivml = getConfiguration("context1"); assertChangeEmpty(ivml); // non-rt cfg has empty changes Configuration baseCfg = getRuntimeConfiguration(ivml); DecisionVariable var = baseCfg.getByName("i2"); Object varValue = var.getValue(); Assert.assertEquals(var.getOriginalValue(), varValue); var.setValue(2); // cause a change Configuration cfg = baseCfg.selectChanged(); Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("i1")); Assert.assertNotNull(cfg.getByName("i2")); cfg = baseCfg.selectChangedWithContext(); // no real context, shall be the same as selectChanged Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("i1")); Assert.assertNotNull(cfg.getByName("i2")); Assert.assertEquals(var.getOriginalValue(), varValue); } /** * Tests a simple changed compound configuration. */ @Test public void testSelectChanged2() { net.ssehub.easy.varModel.confModel.Configuration ivml = getConfiguration("context2"); assertChangeEmpty(ivml); // non-rt cfg has empty changes Configuration baseCfg = getRuntimeConfiguration(ivml); Object origValue = setCompoundVar(baseCfg, "cmp", "i2", 2); // cause a change Configuration cfg = baseCfg.selectChanged(); Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("j")); Assert.assertNotNull(cfg.getByName("cmp")); Assert.assertNull(getCompoundVar(cfg, "cmp", "i1")); Assert.assertNotNull(getCompoundVar(cfg, "cmp", "i2")); cfg = baseCfg.selectChangedWithContext(); Assert.assertNull(cfg.getByName("j")); Assert.assertNotNull(cfg.getByName("cmp")); Assert.assertNotNull(getCompoundVar(cfg, "cmp", "i1")); // in context Assert.assertNotNull(getCompoundVar(cfg, "cmp", "i2")); Assert.assertEquals(origValue, getCompoundVar(cfg, "cmp", "i2").getOriginalValue()); } /** * Tests a simple changed compound sequence configuration. */ @Test public void testSelectChanged3() { net.ssehub.easy.varModel.confModel.Configuration ivml = getConfiguration("context3"); assertChangeEmpty(ivml); // non-rt cfg has empty changes Configuration baseCfg = getRuntimeConfiguration(ivml); DecisionVariable seq = baseCfg.getByName("cmpSeq"); Assert.assertNotNull(seq); Sequence<DecisionVariable> vars = seq.variables(); Assert.assertEquals(2, vars.size()); vars.at(0).getByName("i2").setValue(2); // cause a change Configuration cfg = baseCfg.selectChanged(); Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("j")); seq = cfg.getByName("cmpSeq"); Assert.assertNotNull(seq); vars = seq.variables(); Assert.assertEquals(1, vars.size()); Assert.assertNull(vars.at(0).getByName("i1")); Assert.assertNotNull(vars.at(0).getByName("i2")); cfg = baseCfg.selectChangedWithContext(); Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("j")); seq = cfg.getByName("cmpSeq"); assertAssignable(net.ssehub.easy.varModel.model.datatypes.Sequence.TYPE, seq); Assert.assertNotNull(seq); vars = seq.variables(); Assert.assertEquals(1, vars.size()); DecisionVariable elt = vars.at(0); assertAssignable(Compound.TYPE, elt); Assert.assertNotNull(elt.getByName("i1")); // in context Assert.assertNotNull(elt.getByName("i2")); } /** * Returns the variable of a compound slot. 
* * @param cfg the configuration * @param name the name of the top-level variable * @param slot the slot name * @return the variable */ private DecisionVariable getCompoundVar(Configuration cfg, String name, String slot) { DecisionVariable var = cfg.getByName(name); return null == var ? null : var.getByName(slot); } /** * Changes the value of a compound slot. * * @param cfg the configuration * @param name the name of the top-level variable * @param slot the slot name * @param value the new value * @return the value before */ private Object setCompoundVar(Configuration cfg, String name, String slot, Object value) { DecisionVariable var = getCompoundVar(cfg, name, slot); Object result = var.getValue(); var.setValue(value); return result; } /** * Tests a simple changed compound reference configuration. */ @Test public void testSelectChanged4() { net.ssehub.easy.varModel.confModel.Configuration ivml = getConfiguration("context4"); assertChangeEmpty(ivml); // non-rt cfg has empty changes Configuration baseCfg = getRuntimeConfiguration(ivml); Object origValue = setCompoundVar(baseCfg, "cmp1", "i2", 2); // cause a change Configuration cfg = baseCfg.selectChanged(); Assert.assertEquals(1, cfg.variables().size()); Assert.assertNull(cfg.getByName("j")); Assert.assertNull(cfg.getByName("cmpSeq")); Assert.assertNotNull(cfg.getByName("cmp1")); Assert.assertNull(cfg.getByName("cmp2")); Assert.assertNull(getCompoundVar(cfg, "cmp1", "i1")); Assert.assertNotNull(getCompoundVar(cfg, "cmp1", "i2")); cfg = baseCfg.selectChangedWithContext(); Assert.assertEquals(2, cfg.variables().size()); Assert.assertNull(cfg.getByName("j")); Assert.assertNull(cfg.getByName("cmp2")); Assert.assertNotNull(cfg.getByName("cmp1")); Assert.assertNotNull(getCompoundVar(cfg, "cmp1", "i1")); // in context Assert.assertNotNull(getCompoundVar(cfg, "cmp1", "i2")); DecisionVariable seq = cfg.getByName("cmpSeq"); assertAssignable(net.ssehub.easy.varModel.model.datatypes.Sequence.TYPE, seq); Assert.assertNotNull(seq); Sequence<DecisionVariable> vars = seq.variables(); Assert.assertEquals(1, vars.size()); DecisionVariable elt = vars.at(0); assertAssignable(Reference.TYPE, elt); Assert.assertNotNull(vars.at(0).getByName("i1")); Assert.assertNotNull(vars.at(0).getByName("i2")); Assert.assertEquals(origValue, getCompoundVar(baseCfg, "cmp1", "i2").getOriginalValue()); } /** * Asserts that the type of <code>var</code> is assignable to <code>type</code>. * * @param type the type defining the required base type * @param var the variable to check */ private static void assertAssignable(IDatatype type, AbstractIvmlVariable var) { Assert.assertTrue(type.isAssignableFrom(var.getVariable().getDeclaration().getType())); } /** * Prints a sequence and its values (debugging). * * @param seq the sequence */ private void println(Sequence<DecisionVariable> seq) { System.out.print("{"); for (int i = 0; i < seq.size(); i++) { if (i > 0) { System.out.print(", "); } DecisionVariable dv = seq.at(i); System.out.print(dv.getName() + " (" + dv.getQualifiedName() + ") = " + dv.getValue()); } System.out.println("}"); } /** * Prints a configuration and its values (debugging). * * @param cfg the configuration */ @SuppressWarnings("unused") private void println(Configuration cfg) { Sequence<DecisionVariable> seq = cfg.variables(); System.out.print("cfg " + cfg.getName() + " = "); println(seq); } }
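/*
 * Editor's sketch (not part of the test suite): the selectByName("p.*")
 * style calls compared in testConfiguration() above conceptually reduce to
 * a regex filter over the qualified names of the configuration's decision
 * variables. This dependency-free program shows that reduction; the map,
 * names, and values are hypothetical stand-ins for an IVML configuration.
 */
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

class NamePatternSelectorSketch {
  // Keep every entry whose qualified name matches the pattern, mirroring
  // a NamePatternSelector applied to configuration.variables().
  static Map<String, Object> selectByName(Map<String, Object> vars, String regex) {
    Pattern pattern = Pattern.compile(regex);
    Map<String, Object> selected = new LinkedHashMap<String, Object>();
    for (Map.Entry<String, Object> e : vars.entrySet()) {
      if (pattern.matcher(e.getKey()).matches()) {
        selected.put(e.getKey(), e.getValue());
      }
    }
    return selected;
  }

  public static void main(String[] args) {
    Map<String, Object> vars = new LinkedHashMap<String, Object>();
    vars.put("pInt", Integer.valueOf(1));
    vars.put("pString", "text");
    vars.put("other", Boolean.TRUE);
    System.out.println(selectByName(vars, "p.*").keySet());  // [pInt, pString]
    System.out.println(selectByName(vars, "pInt").keySet()); // [pInt]
  }
}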
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.fielddata.fieldcomparator; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; /** * Comparator source for string/binary values. */ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparatorSource { private final IndexFieldData<?> indexFieldData; private final MultiValueMode sortMode; private final Object missingValue; private final Nested nested; public BytesRefFieldComparatorSource(IndexFieldData<?> indexFieldData, Object missingValue, MultiValueMode sortMode, Nested nested) { this.indexFieldData = indexFieldData; this.sortMode = sortMode; this.missingValue = missingValue; this.nested = nested; } @Override public SortField.Type reducedType() { return SortField.Type.STRING; } @Override public Object missingValue(boolean reversed) { if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) { if (sortMissingLast(missingValue) ^ reversed) { return SortField.STRING_LAST; } else { return SortField.STRING_FIRST; } } // otherwise we fill missing values ourselves return null; } protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException { return indexFieldData.load(context).getBytesValues(); } protected void setScorer(Scorer scorer) {} @Override public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed; final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed); if (indexFieldData instanceof IndexOrdinalsFieldData) { return new FieldComparator.TermOrdValComparator(numHits, null, sortMissingLast) { @Override protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues(); final SortedDocValues selectedValues; if (nested == null) { selectedValues = sortMode.select(values); } else { 
final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, rootDocs, innerDocs); } if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) { return selectedValues; } else { return new ReplaceMissing(selectedValues, missingBytes); } } @Override public void setScorer(Scorer scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); } }; } final BytesRef nullPlaceHolder = new BytesRef(); final BytesRef nonNullMissingBytes = missingBytes == null ? nullPlaceHolder : missingBytes; return new FieldComparator.TermValComparator(numHits, null, sortMissingLast) { @Override protected BinaryDocValues getBinaryDocValues(LeafReaderContext context, String field) throws IOException { final SortedBinaryDocValues values = getValues(context); final BinaryDocValues selectedValues; if (nested == null) { selectedValues = sortMode.select(values, nonNullMissingBytes); } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc()); } return selectedValues; } @Override protected Bits getDocsWithField(LeafReaderContext context, String field) throws IOException { return new Bits.MatchAllBits(context.reader().maxDoc()); } @Override protected boolean isNull(int doc, BytesRef term) { return term == nullPlaceHolder; } @Override public void setScorer(Scorer scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); } }; } /** * A view of a SortedDocValues where missing values * are replaced with the specified term */ // TODO: move this out if we need it for other reasons static class ReplaceMissing extends SortedDocValues { final SortedDocValues in; final int substituteOrd; final BytesRef substituteTerm; final boolean exists; ReplaceMissing(SortedDocValues in, BytesRef term) { this.in = in; this.substituteTerm = term; int sub = in.lookupTerm(term); if (sub < 0) { substituteOrd = -sub-1; exists = false; } else { substituteOrd = sub; exists = true; } } @Override public int getOrd(int docID) { int ord = in.getOrd(docID); if (ord < 0) { return substituteOrd; } else if (exists == false && ord >= substituteOrd) { return ord + 1; } else { return ord; } } @Override public int getValueCount() { if (exists) { return in.getValueCount(); } else { return in.getValueCount() + 1; } } @Override public BytesRef lookupOrd(int ord) { if (ord == substituteOrd) { return substituteTerm; } else if (exists == false && ord > substituteOrd) { return in.lookupOrd(ord-1); } else { return in.lookupOrd(ord); } } // we let termsenum etc fall back to the default implementation } }
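/*
 * Editor's sketch (not part of Elasticsearch): the ordinal remapping in
 * ReplaceMissing above is subtle, so this standalone program replays the
 * arithmetic on a two-term dictionary. Arrays.binarySearch plays the role
 * of SortedDocValues#lookupTerm (both return -insertionPoint - 1 when the
 * key is absent); the dictionary, terms, and document ordinals below are
 * hypothetical.
 */
import java.util.Arrays;

class ReplaceMissingOrdSketch {
  public static void main(String[] args) {
    String[] dict = {"apple", "cherry"}; // sorted term dictionary
    String missing = "banana";           // substitute term for value-less docs

    int sub = Arrays.binarySearch(dict, missing); // -2: absent, would insert at 1
    boolean exists = sub >= 0;
    int substituteOrd = exists ? sub : -sub - 1;  // 1

    // Remap per-document ordinals exactly as ReplaceMissing.getOrd does:
    // docs without a value (ord < 0) take the substitute ordinal, and
    // existing ordinals at or past the insertion point shift up by one,
    // yielding the merged dictionary apple=0, banana=1, cherry=2.
    int[] rawOrds = {-1, 0, 1}; // missing doc, "apple", "cherry"
    for (int ord : rawOrds) {
      int mapped;
      if (ord < 0) {
        mapped = substituteOrd;
      } else if (!exists && ord >= substituteOrd) {
        mapped = ord + 1;
      } else {
        mapped = ord;
      }
      System.out.println(ord + " -> " + mapped); // -1 -> 1, 0 -> 0, 1 -> 2
    }
  }
}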
/* * Copyright 2001-present Stephen Colebourne * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.joda.beans.sample; import java.io.Serializable; import java.util.List; import java.util.Map; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaBean; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.gen.BeanDefinition; import org.joda.beans.gen.PropertyDefinition; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; /** * Mock JavaBean, used for testing. * * @author Stephen Colebourne */ @BeanDefinition public class DoubleGenericsWithExtendsSuperOneGeneric<T extends Serializable, U extends Number> extends Documentation<T> { /** The normal type. */ @PropertyDefinition private String normalType; /** The type T value. */ @PropertyDefinition private T typeT; /** The type U value. */ @PropertyDefinition private U typeU; /** The type T value. */ @PropertyDefinition private List<T> typeTList; /** The type U value. */ @PropertyDefinition private List<U> typeUList; /** The type T value. */ @PropertyDefinition private T[] typeTArray; /** The type U value. */ @PropertyDefinition private U[] typeUArray; /** * Creates an instance. */ public DoubleGenericsWithExtendsSuperOneGeneric() { } //------------------------- AUTOGENERATED START ------------------------- /** * The meta-bean for {@code DoubleGenericsWithExtendsSuperOneGeneric}. * @return the meta-bean, not null */ @SuppressWarnings("rawtypes") public static DoubleGenericsWithExtendsSuperOneGeneric.Meta meta() { return DoubleGenericsWithExtendsSuperOneGeneric.Meta.INSTANCE; } /** * The meta-bean for {@code DoubleGenericsWithExtendsSuperOneGeneric}. * @param <R> the first generic type * @param <S> the second generic type * @param cls1 the first generic type * @param cls2 the second generic type * @return the meta-bean, not null */ @SuppressWarnings("unchecked") public static <R extends Serializable, S extends Number> DoubleGenericsWithExtendsSuperOneGeneric.Meta<R, S> metaDoubleGenericsWithExtendsSuperOneGeneric(Class<R> cls1, Class<S> cls2) { return DoubleGenericsWithExtendsSuperOneGeneric.Meta.INSTANCE; } static { MetaBean.register(DoubleGenericsWithExtendsSuperOneGeneric.Meta.INSTANCE); } @SuppressWarnings("unchecked") @Override public DoubleGenericsWithExtendsSuperOneGeneric.Meta<T, U> metaBean() { return DoubleGenericsWithExtendsSuperOneGeneric.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets the normal type. * @return the value of the property */ public String getNormalType() { return normalType; } /** * Sets the normal type. * @param normalType the new value of the property */ public void setNormalType(String normalType) { this.normalType = normalType; } /** * Gets the {@code normalType} property.
* @return the property, not null */ public final Property<String> normalType() { return metaBean().normalType().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type T value. * @return the value of the property */ public T getTypeT() { return typeT; } /** * Sets the type T value. * @param typeT the new value of the property */ public void setTypeT(T typeT) { this.typeT = typeT; } /** * Gets the the {@code typeT} property. * @return the property, not null */ public final Property<T> typeT() { return metaBean().typeT().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type U value. * @return the value of the property */ public U getTypeU() { return typeU; } /** * Sets the type U value. * @param typeU the new value of the property */ public void setTypeU(U typeU) { this.typeU = typeU; } /** * Gets the the {@code typeU} property. * @return the property, not null */ public final Property<U> typeU() { return metaBean().typeU().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type T value. * @return the value of the property */ public List<T> getTypeTList() { return typeTList; } /** * Sets the type T value. * @param typeTList the new value of the property */ public void setTypeTList(List<T> typeTList) { this.typeTList = typeTList; } /** * Gets the the {@code typeTList} property. * @return the property, not null */ public final Property<List<T>> typeTList() { return metaBean().typeTList().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type U value. * @return the value of the property */ public List<U> getTypeUList() { return typeUList; } /** * Sets the type U value. * @param typeUList the new value of the property */ public void setTypeUList(List<U> typeUList) { this.typeUList = typeUList; } /** * Gets the the {@code typeUList} property. * @return the property, not null */ public final Property<List<U>> typeUList() { return metaBean().typeUList().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type T value. * @return the value of the property */ public T[] getTypeTArray() { return typeTArray; } /** * Sets the type T value. * @param typeTArray the new value of the property */ public void setTypeTArray(T[] typeTArray) { this.typeTArray = typeTArray; } /** * Gets the the {@code typeTArray} property. * @return the property, not null */ public final Property<T[]> typeTArray() { return metaBean().typeTArray().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the type U value. * @return the value of the property */ public U[] getTypeUArray() { return typeUArray; } /** * Sets the type U value. * @param typeUArray the new value of the property */ public void setTypeUArray(U[] typeUArray) { this.typeUArray = typeUArray; } /** * Gets the the {@code typeUArray} property. 
* @return the property, not null */ public final Property<U[]> typeUArray() { return metaBean().typeUArray().createProperty(this); } //----------------------------------------------------------------------- @Override public DoubleGenericsWithExtendsSuperOneGeneric<T, U> clone() { return JodaBeanUtils.cloneAlways(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { DoubleGenericsWithExtendsSuperOneGeneric<?, ?> other = (DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) obj; return JodaBeanUtils.equal(getNormalType(), other.getNormalType()) && JodaBeanUtils.equal(getTypeT(), other.getTypeT()) && JodaBeanUtils.equal(getTypeU(), other.getTypeU()) && JodaBeanUtils.equal(getTypeTList(), other.getTypeTList()) && JodaBeanUtils.equal(getTypeUList(), other.getTypeUList()) && JodaBeanUtils.equal(getTypeTArray(), other.getTypeTArray()) && JodaBeanUtils.equal(getTypeUArray(), other.getTypeUArray()) && super.equals(obj); } return false; } @Override public int hashCode() { int hash = 7; hash = hash * 31 + JodaBeanUtils.hashCode(getNormalType()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeT()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeU()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeTList()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeUList()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeTArray()); hash = hash * 31 + JodaBeanUtils.hashCode(getTypeUArray()); return hash ^ super.hashCode(); } @Override public String toString() { StringBuilder buf = new StringBuilder(256); buf.append("DoubleGenericsWithExtendsSuperOneGeneric{"); int len = buf.length(); toString(buf); if (buf.length() > len) { buf.setLength(buf.length() - 2); } buf.append('}'); return buf.toString(); } @Override protected void toString(StringBuilder buf) { super.toString(buf); buf.append("normalType").append('=').append(JodaBeanUtils.toString(getNormalType())).append(',').append(' '); buf.append("typeT").append('=').append(JodaBeanUtils.toString(getTypeT())).append(',').append(' '); buf.append("typeU").append('=').append(JodaBeanUtils.toString(getTypeU())).append(',').append(' '); buf.append("typeTList").append('=').append(JodaBeanUtils.toString(getTypeTList())).append(',').append(' '); buf.append("typeUList").append('=').append(JodaBeanUtils.toString(getTypeUList())).append(',').append(' '); buf.append("typeTArray").append('=').append(JodaBeanUtils.toString(getTypeTArray())).append(',').append(' '); buf.append("typeUArray").append('=').append(JodaBeanUtils.toString(getTypeUArray())).append(',').append(' '); } //----------------------------------------------------------------------- /** * The meta-bean for {@code DoubleGenericsWithExtendsSuperOneGeneric}. * @param <T> the type * @param <U> the type */ public static class Meta<T extends Serializable, U extends Number> extends Documentation.Meta<T> { /** * The singleton instance of the meta-bean. */ @SuppressWarnings("rawtypes") static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code normalType} property. */ private final MetaProperty<String> normalType = DirectMetaProperty.ofReadWrite( this, "normalType", DoubleGenericsWithExtendsSuperOneGeneric.class, String.class); /** * The meta-property for the {@code typeT} property. 
*/ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<T> typeT = (DirectMetaProperty) DirectMetaProperty.ofReadWrite( this, "typeT", DoubleGenericsWithExtendsSuperOneGeneric.class, Object.class); /** * The meta-property for the {@code typeU} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<U> typeU = (DirectMetaProperty) DirectMetaProperty.ofReadWrite( this, "typeU", DoubleGenericsWithExtendsSuperOneGeneric.class, Object.class); /** * The meta-property for the {@code typeTList} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<List<T>> typeTList = DirectMetaProperty.ofReadWrite( this, "typeTList", DoubleGenericsWithExtendsSuperOneGeneric.class, (Class) List.class); /** * The meta-property for the {@code typeUList} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<List<U>> typeUList = DirectMetaProperty.ofReadWrite( this, "typeUList", DoubleGenericsWithExtendsSuperOneGeneric.class, (Class) List.class); /** * The meta-property for the {@code typeTArray} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<T[]> typeTArray = (DirectMetaProperty) DirectMetaProperty.ofReadWrite( this, "typeTArray", DoubleGenericsWithExtendsSuperOneGeneric.class, Object[].class); /** * The meta-property for the {@code typeUArray} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<U[]> typeUArray = (DirectMetaProperty) DirectMetaProperty.ofReadWrite( this, "typeUArray", DoubleGenericsWithExtendsSuperOneGeneric.class, Object[].class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "normalType", "typeT", "typeU", "typeTList", "typeUList", "typeTArray", "typeUArray"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -1255672639: // normalType return normalType; case 110843994: // typeT return typeT; case 110843995: // typeU return typeU; case 508018712: // typeTList return typeTList; case 508942233: // typeUList return typeUList; case -1441181153: // typeTArray return typeTArray; case -1412552002: // typeUArray return typeUArray; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends DoubleGenericsWithExtendsSuperOneGeneric<T, U>> builder() { return new DirectBeanBuilder<>(new DoubleGenericsWithExtendsSuperOneGeneric<T, U>()); } @SuppressWarnings({"unchecked", "rawtypes" }) @Override public Class<? extends DoubleGenericsWithExtendsSuperOneGeneric<T, U>> beanType() { return (Class) DoubleGenericsWithExtendsSuperOneGeneric.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code normalType} property. * @return the meta-property, not null */ public final MetaProperty<String> normalType() { return normalType; } /** * The meta-property for the {@code typeT} property. * @return the meta-property, not null */ public final MetaProperty<T> typeT() { return typeT; } /** * The meta-property for the {@code typeU} property. * @return the meta-property, not null */ public final MetaProperty<U> typeU() { return typeU; } /** * The meta-property for the {@code typeTList} property. 
* @return the meta-property, not null */ public final MetaProperty<List<T>> typeTList() { return typeTList; } /** * The meta-property for the {@code typeUList} property. * @return the meta-property, not null */ public final MetaProperty<List<U>> typeUList() { return typeUList; } /** * The meta-property for the {@code typeTArray} property. * @return the meta-property, not null */ public final MetaProperty<T[]> typeTArray() { return typeTArray; } /** * The meta-property for the {@code typeUArray} property. * @return the meta-property, not null */ public final MetaProperty<U[]> typeUArray() { return typeUArray; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -1255672639: // normalType return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getNormalType(); case 110843994: // typeT return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeT(); case 110843995: // typeU return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeU(); case 508018712: // typeTList return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeTList(); case 508942233: // typeUList return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeUList(); case -1441181153: // typeTArray return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeTArray(); case -1412552002: // typeUArray return ((DoubleGenericsWithExtendsSuperOneGeneric<?, ?>) bean).getTypeUArray(); } return super.propertyGet(bean, propertyName, quiet); } @SuppressWarnings("unchecked") @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case -1255672639: // normalType ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setNormalType((String) newValue); return; case 110843994: // typeT ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeT((T) newValue); return; case 110843995: // typeU ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeU((U) newValue); return; case 508018712: // typeTList ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeTList((List<T>) newValue); return; case 508942233: // typeUList ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeUList((List<U>) newValue); return; case -1441181153: // typeTArray ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeTArray((T[]) newValue); return; case -1412552002: // typeUArray ((DoubleGenericsWithExtendsSuperOneGeneric<T, U>) bean).setTypeUArray((U[]) newValue); return; } super.propertySet(bean, propertyName, newValue, quiet); } } //-------------------------- AUTOGENERATED END -------------------------- }
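
/*
 * Editor's note: an illustrative usage sketch, not part of the generated file above.
 * It shows how the generated meta-bean plumbing is typically exercised; the class
 * name GenericBeanUsageExample is hypothetical, while metaProperty(), MetaProperty
 * get/set and Property.get() are standard Joda-Beans public API. It assumes the
 * Documentation superclass is instantiable, as with the other sample beans.
 */
class GenericBeanUsageExample {
    public static void main(String[] args) {
        DoubleGenericsWithExtendsSuperOneGeneric<String, Integer> bean =
                new DoubleGenericsWithExtendsSuperOneGeneric<>();
        bean.setNormalType("plain");
        bean.setTypeT("generic-T");
        bean.setTypeU(42);

        // Dynamic access through the generated meta-bean, as Joda-Beans tools do.
        MetaProperty<Object> mp = bean.metaBean().metaProperty("typeU");
        System.out.println(mp.get(bean));    // 42
        mp.set(bean, 7);
        System.out.println(bean.getTypeU()); // 7

        // Property objects bind a meta-property to this bean instance.
        Property<String> normalType = bean.normalType();
        System.out.println(normalType.get()); // plain
    }
}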
/* * Copyright 2016 The AppAuth for Android Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package net.openid.appauth; import android.net.Uri; import org.json.JSONException; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static net.openid.appauth.TestValues.TEST_APP_REDIRECT_URI; import static net.openid.appauth.TestValues.TEST_APP_SCHEME; import static net.openid.appauth.TestValues.getTestServiceConfig; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; @RunWith(RobolectricTestRunner.class) @Config(sdk = 16) public class RegistrationRequestTest { private static final Map<String, String> TEST_ADDITIONAL_PARAMS; static { TEST_ADDITIONAL_PARAMS = new HashMap<>(); TEST_ADDITIONAL_PARAMS.put("test_key1", "test_value1"); TEST_ADDITIONAL_PARAMS.put("test_key2", "test_value2"); } private static final String TEST_JSON = "{\n" + " \"application_type\": \"" + RegistrationRequest.APPLICATION_TYPE_NATIVE + "\",\n" + " \"redirect_uris\": [\"" + TEST_APP_REDIRECT_URI + "\"],\n" + " \"subject_type\": \"" + RegistrationRequest.SUBJECT_TYPE_PAIRWISE + "\",\n" + " \"response_types\": [\"" + ResponseTypeValues.ID_TOKEN + "\"],\n" + " \"grant_types\": [\"" + GrantTypeValues.IMPLICIT + "\"]\n" + "}"; public static final Uri TEST_JWKS_URI = Uri.parse("https://mydomain/path/keys"); private static final String TEST_JWKS = "{\n" + " \"keys\": [\n" + " {\n" + " \"kty\": \"RSA\",\n" + " \"kid\": \"key1\",\n" + " \"n\": \"AJnc...L0HU=\",\n" + " \"e\": \"AQAB\"\n" + " }\n" + " ]\n" + "}"; private RegistrationRequest.Builder mMinimalRequestBuilder; private RegistrationRequest.Builder mMaximalRequestBuilder; private JSONObject mJson; private List<Uri> mRedirectUris; @Before public void setUp() throws JSONException { mRedirectUris = Arrays.asList(TEST_APP_REDIRECT_URI); mMinimalRequestBuilder = new RegistrationRequest.Builder( getTestServiceConfig(), mRedirectUris); mMaximalRequestBuilder = new RegistrationRequest.Builder( getTestServiceConfig(), mRedirectUris) .setResponseTypeValues(ResponseTypeValues.ID_TOKEN) .setGrantTypeValues(GrantTypeValues.IMPLICIT) .setSubjectType(RegistrationRequest.SUBJECT_TYPE_PAIRWISE); mJson = new JSONObject(TEST_JSON); } @Test public void testBuilder() { assertValues(mMinimalRequestBuilder.build()); } @Test(expected = NullPointerException.class) public void testBuild_nullConfiguration() { new RegistrationRequest.Builder(null, mRedirectUris).build(); } @Test(expected = NullPointerException.class) public void testBuild_nullRedirectUri() { new RegistrationRequest.Builder(getTestServiceConfig(), null) .build(); } @Test public void testBuilder_setRedirectUriValues() { Uri redirect1 = Uri.parse(TEST_APP_SCHEME + ":/callback1"); Uri redirect2 = 
Uri.parse(TEST_APP_SCHEME + ":/callback2"); mMinimalRequestBuilder.setRedirectUriValues(redirect1, redirect2); RegistrationRequest request = mMinimalRequestBuilder.build(); assertThat(request.redirectUris.containsAll(Arrays.asList(redirect1, redirect2))).isTrue(); } @Test(expected = IllegalArgumentException.class) public void testBuilder_setAdditionalParams_withBuiltInParam() { Map<String, String> additionalParams = new HashMap<>(); additionalParams.put(RegistrationRequest.PARAM_APPLICATION_TYPE, "web"); mMinimalRequestBuilder.setAdditionalParameters(additionalParams); } @Test public void testApplicationTypeIsNativeByDefault() { RegistrationRequest request = mMinimalRequestBuilder.build(); assertThat(request.applicationType).isEqualTo(RegistrationRequest.APPLICATION_TYPE_NATIVE); } @Test public void testToJsonString_withAdditionalParameters() throws JSONException { RegistrationRequest request = mMinimalRequestBuilder .setAdditionalParameters(TEST_ADDITIONAL_PARAMS) .build(); String jsonStr = request.toJsonString(); JSONObject json = new JSONObject(jsonStr); for (Map.Entry<String, String> param : TEST_ADDITIONAL_PARAMS.entrySet()) { assertThat(json.get(param.getKey())).isEqualTo(param.getValue()); } assertThat(request.applicationType).isEqualTo(RegistrationRequest.APPLICATION_TYPE_NATIVE); } @Test public void testToJsonString() throws JSONException { RegistrationRequest request = mMaximalRequestBuilder.build(); String jsonStr = request.toJsonString(); assertMaximalValuesInJson(request, new JSONObject(jsonStr)); } @Test public void testToJsonString_withJwksUri() throws JSONException { RegistrationRequest request = mMinimalRequestBuilder .setJwksUri(TEST_JWKS_URI) .build(); String jsonStr = request.toJsonString(); JSONObject json = new JSONObject(jsonStr); assertThat(Uri.parse(json.getString(RegistrationRequest.PARAM_JWKS_URI))) .isEqualTo(TEST_JWKS_URI); } @Test public void testToJsonString_withJwks() throws JSONException { RegistrationRequest request = mMinimalRequestBuilder .setJwks(new JSONObject(TEST_JWKS)) .build(); assertThat(request.jwks).isNotNull(); String jsonStr = request.toJsonString(); JSONObject json = new JSONObject(jsonStr); assertThat(json.getJSONObject(RegistrationRequest.PARAM_JWKS).toString()) .isEqualTo(request.jwks.toString()); } @Test public void testSerialize() throws JSONException { RegistrationRequest request = mMaximalRequestBuilder.build(); JSONObject json = request.jsonSerialize(); assertMaximalValuesInJson(request, json); assertThat(json.getJSONObject(RegistrationRequest.KEY_CONFIGURATION).toString()) .isEqualTo(request.configuration.toJson().toString()); } @Test public void testSerialize_withAdditionalParameters() throws JSONException { Map<String, String> additionalParameters = Collections.singletonMap("test1", "value1"); RegistrationRequest request = mMaximalRequestBuilder .setAdditionalParameters(additionalParameters).build(); JSONObject json = request.jsonSerialize(); assertMaximalValuesInJson(request, json); assertThat(JsonUtil.getStringMap(json, RegistrationRequest.KEY_ADDITIONAL_PARAMETERS)) .isEqualTo(additionalParameters); } @Test public void testDeserialize() throws JSONException { mJson.put(RegistrationRequest.KEY_CONFIGURATION, getTestServiceConfig().toJson()); RegistrationRequest request = RegistrationRequest.jsonDeserialize(mJson); assertThat(request.configuration.toJsonString()) .isEqualTo(getTestServiceConfig().toJsonString()); assertMaximalValuesInJson(request, mJson); } @Test public void testDeserialize_withAdditionalParameters() throws 
JSONException { mJson.put(RegistrationRequest.KEY_CONFIGURATION, getTestServiceConfig().toJson()); Map<String, String> additionalParameters = new HashMap<>(); additionalParameters.put("key1", "value1"); additionalParameters.put("key2", "value2"); mJson.put(RegistrationRequest.KEY_ADDITIONAL_PARAMETERS, JsonUtil.mapToJsonObject(additionalParameters)); RegistrationRequest request = RegistrationRequest.jsonDeserialize(mJson); assertThat(request.additionalParameters).isEqualTo(additionalParameters); } private void assertValues(RegistrationRequest request) { assertEquals("unexpected redirect URI", TEST_APP_REDIRECT_URI, request.redirectUris.iterator().next()); assertEquals("unexpected application type", RegistrationRequest.APPLICATION_TYPE_NATIVE, request.applicationType); } private void assertMaximalValuesInJson(RegistrationRequest request, JSONObject json) throws JSONException { assertThat(json.get(RegistrationRequest.PARAM_REDIRECT_URIS)) .isEqualTo(JsonUtil.toJsonArray(request.redirectUris)); assertThat(json.get(RegistrationRequest.PARAM_APPLICATION_TYPE)) .isEqualTo(RegistrationRequest.APPLICATION_TYPE_NATIVE); assertThat(json.get(RegistrationRequest.PARAM_RESPONSE_TYPES)) .isEqualTo(JsonUtil.toJsonArray(request.responseTypes)); assertThat(json.get(RegistrationRequest.PARAM_GRANT_TYPES)) .isEqualTo(JsonUtil.toJsonArray(request.grantTypes)); assertThat(json.get(RegistrationRequest.PARAM_SUBJECT_TYPE)) .isEqualTo(request.subjectType); } }
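
/*
 * Editor's note: a possible complementary test, not in the original file, sketching a
 * full jsonSerialize()/jsonDeserialize() round trip. It assumes the two methods are
 * exact inverses for a maximal request and reuses only members defined above.
 *
 *     @Test
 *     public void testJsonRoundTrip() throws JSONException {
 *         RegistrationRequest original = mMaximalRequestBuilder.build();
 *         RegistrationRequest restored =
 *                 RegistrationRequest.jsonDeserialize(original.jsonSerialize());
 *         assertThat(restored.toJsonString()).isEqualTo(original.toJsonString());
 *     }
 */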
package skylin.server; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Enumeration; import java.util.EventListener; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.Vector; import javax.servlet.Filter; import javax.servlet.FilterRegistration; import javax.servlet.FilterRegistration.Dynamic; import javax.servlet.RequestDispatcher; import javax.servlet.Servlet; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRegistration; import javax.servlet.SessionCookieConfig; import javax.servlet.SessionTrackingMode; import javax.servlet.descriptor.JspConfigDescriptor; public class SkylinServletContext implements ServletContext{ @Override public Dynamic addFilter(String arg0, String arg1) { // TODO Auto-generated method stub return null; } @Override public Dynamic addFilter(String arg0, Filter arg1) { // TODO Auto-generated method stub return null; } @Override public Dynamic addFilter(String arg0, Class<? extends Filter> arg1) { // TODO Auto-generated method stub return null; } @Override public void addListener(String arg0) { // TODO Auto-generated method stub } @Override public <T extends EventListener> void addListener(T arg0) { // TODO Auto-generated method stub } @Override public void addListener(Class<? extends EventListener> arg0) { // TODO Auto-generated method stub } @Override public javax.servlet.ServletRegistration.Dynamic addServlet(String arg0, String arg1) { // TODO Auto-generated method stub return null; } @Override public javax.servlet.ServletRegistration.Dynamic addServlet(String arg0, Servlet arg1) { // TODO Auto-generated method stub return null; } @Override public javax.servlet.ServletRegistration.Dynamic addServlet(String arg0, Class<? extends Servlet> arg1) { // TODO Auto-generated method stub return null; } @Override public <T extends Filter> T createFilter(Class<T> arg0) throws ServletException { // TODO Auto-generated method stub return null; } @Override public <T extends EventListener> T createListener(Class<T> arg0) throws ServletException { // TODO Auto-generated method stub return null; } @Override public <T extends Servlet> T createServlet(Class<T> arg0) throws ServletException { // TODO Auto-generated method stub return null; } @Override public void declareRoles(String... 
arg0) { // TODO Auto-generated method stub } @Override public Object getAttribute(String arg0) { // TODO Auto-generated method stub return null; } @Override public Enumeration<String> getAttributeNames() { // TODO Auto-generated method stub return null; } @Override public ClassLoader getClassLoader() { // TODO Auto-generated method stub return null; } @Override public ServletContext getContext(String arg0) { // TODO Auto-generated method stub return null; } @Override public String getContextPath() { // TODO Auto-generated method stub return null; } @Override public Set<SessionTrackingMode> getDefaultSessionTrackingModes() { // TODO Auto-generated method stub return null; } @Override public int getEffectiveMajorVersion() { // TODO Auto-generated method stub return 0; } @Override public int getEffectiveMinorVersion() { // TODO Auto-generated method stub return 0; } @Override public Set<SessionTrackingMode> getEffectiveSessionTrackingModes() { // TODO Auto-generated method stub return null; } @Override public FilterRegistration getFilterRegistration(String arg0) { // TODO Auto-generated method stub return null; } @Override public Map<String, ? extends FilterRegistration> getFilterRegistrations() { // TODO Auto-generated method stub return null; } public HashMap<String,String> params = new HashMap<String,String>(); @Override public String getInitParameter(String param) { return params.get(param); } @Override public Enumeration<String> getInitParameterNames() { return new Vector(params.keySet()).elements(); } @Override public JspConfigDescriptor getJspConfigDescriptor() { // TODO Auto-generated method stub return null; } @Override public int getMajorVersion() { // TODO Auto-generated method stub return 0; } @Override public String getMimeType(String arg0) { // TODO Auto-generated method stub return null; } @Override public int getMinorVersion() { // TODO Auto-generated method stub return 0; } @Override public RequestDispatcher getNamedDispatcher(String arg0) { // TODO Auto-generated method stub return null; } @Override public String getRealPath(String arg0) { // TODO Auto-generated method stub return null; } @Override public RequestDispatcher getRequestDispatcher(String arg0) { // TODO Auto-generated method stub return null; } @Override public URL getResource(String arg0) throws MalformedURLException { // TODO Auto-generated method stub return null; } @Override public InputStream getResourceAsStream(String arg0) { // TODO Auto-generated method stub return null; } @Override public Set<String> getResourcePaths(String arg0) { // TODO Auto-generated method stub return null; } @Override public String getServerInfo() { // TODO Auto-generated method stub return null; } @Override public Servlet getServlet(String arg0) throws ServletException { // TODO Auto-generated method stub return null; } @Override public String getServletContextName() { // TODO Auto-generated method stub return null; } @Override public Enumeration<String> getServletNames() { // TODO Auto-generated method stub return null; } @Override public ServletRegistration getServletRegistration(String arg0) { // TODO Auto-generated method stub return null; } @Override public Map<String, ? 
extends ServletRegistration> getServletRegistrations() { // TODO Auto-generated method stub return null; } @Override public Enumeration<Servlet> getServlets() { // TODO Auto-generated method stub return null; } @Override public SessionCookieConfig getSessionCookieConfig() { // TODO Auto-generated method stub return null; } @Override public void log(String arg0) { // TODO Auto-generated method stub } @Override public void log(Exception arg0, String arg1) { // TODO Auto-generated method stub } @Override public void log(String arg0, Throwable arg1) { // TODO Auto-generated method stub } @Override public void removeAttribute(String arg0) { // TODO Auto-generated method stub } @Override public void setAttribute(String arg0, Object arg1) { // TODO Auto-generated method stub } @Override public boolean setInitParameter(String arg0, String arg1) { // TODO Auto-generated method stub return false; } @Override public void setSessionTrackingModes(Set<SessionTrackingMode> arg0) throws IllegalStateException, IllegalArgumentException { // TODO Auto-generated method stub } }
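
/*
 * Editor's note: a minimal usage sketch, not part of the original file, for the only
 * implemented behaviour in SkylinServletContext: the public init-parameter map backing
 * getInitParameter/getInitParameterNames. Every other method is still a TODO stub that
 * returns null, 0, or false. The class name and parameter values are hypothetical.
 */
class SkylinServletContextDemo {
    public static void main(String[] args) {
        SkylinServletContext ctx = new SkylinServletContext();
        ctx.params.put("configLocation", "/etc/skylin/app.conf");

        System.out.println(ctx.getInitParameter("configLocation")); // /etc/skylin/app.conf
        System.out.println(ctx.getInitParameter("missing"));        // null

        java.util.Enumeration<String> names = ctx.getInitParameterNames();
        while (names.hasMoreElements()) {
            System.out.println(names.nextElement()); // configLocation
        }
    }
}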
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.gateway; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.SimpleFSDirectory; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.CoordinationMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.gateway.PersistedClusterStateService.Writer; import org.elasticsearch.index.Index; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOError; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.nullValue; public class PersistedClusterStateServiceTests extends ESTestCase { private PersistedClusterStateService newPersistedClusterStateService(NodeEnvironment nodeEnvironment) { return new PersistedClusterStateService(nodeEnvironment, xContentRegistry(), getBigArrays(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), () -> 0L); } public void 
testPersistsAndReloadsTerm() throws IOException { try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final long newTerm = randomNonNegativeLong(); assertThat(persistedClusterStateService.loadBestOnDiskState().currentTerm, equalTo(0L)); try (Writer writer = persistedClusterStateService.createWriter()) { writer.writeFullStateAndCommit(newTerm, ClusterState.EMPTY_STATE); assertThat(persistedClusterStateService.loadBestOnDiskState().currentTerm, equalTo(newTerm)); } assertThat(persistedClusterStateService.loadBestOnDiskState().currentTerm, equalTo(newTerm)); } } public void testPersistsAndReloadsGlobalMetadata() throws IOException { try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final String clusterUUID = UUIDs.randomBase64UUID(random()); final long version = randomLongBetween(1L, Long.MAX_VALUE); ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); try (Writer writer = persistedClusterStateService.createWriter()) { writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(clusterUUID) .clusterUUIDCommitted(true) .version(version)) .incrementVersion().build()); clusterState = loadPersistedClusterState(persistedClusterStateService); assertThat(clusterState.metadata().clusterUUID(), equalTo(clusterUUID)); assertTrue(clusterState.metadata().clusterUUIDCommitted()); assertThat(clusterState.metadata().version(), equalTo(version)); } try (Writer writer = persistedClusterStateService.createWriter()) { writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(clusterUUID) .clusterUUIDCommitted(true) .version(version + 1)) .incrementVersion().build()); } clusterState = loadPersistedClusterState(persistedClusterStateService); assertThat(clusterState.metadata().clusterUUID(), equalTo(clusterUUID)); assertTrue(clusterState.metadata().clusterUUIDCommitted()); assertThat(clusterState.metadata().version(), equalTo(version + 1)); } } private static void writeState(Writer writer, long currentTerm, ClusterState clusterState, ClusterState previousState) throws IOException { if (randomBoolean() || clusterState.term() != previousState.term() || writer.fullStateWritten == false) { writer.writeFullStateAndCommit(currentTerm, clusterState); } else { writer.writeIncrementalStateAndCommit(currentTerm, previousState, clusterState); } } public void testLoadsFreshestState() throws IOException { final Path[] dataPaths = createDataPaths(); final long freshTerm = randomLongBetween(1L, Long.MAX_VALUE); final long staleTerm = randomBoolean() ? freshTerm : randomLongBetween(1L, freshTerm); final long freshVersion = randomLongBetween(2L, Long.MAX_VALUE); final long staleVersion = staleTerm == freshTerm ? 
randomLongBetween(1L, freshVersion - 1) : randomLongBetween(1L, Long.MAX_VALUE); final HashSet<Path> unimportantPaths = Arrays.stream(dataPaths).collect(Collectors.toCollection(HashSet::new)); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths)) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { writeState(writer, staleTerm, ClusterState.builder(clusterState).version(staleVersion) .metadata(Metadata.builder(clusterState.metadata()).coordinationMetadata( CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(staleTerm).build())).build(), clusterState); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(new Path[]{randomFrom(dataPaths)})) { unimportantPaths.remove(nodeEnvironment.nodeDataPaths()[0]); try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writeState(writer, freshTerm, ClusterState.builder(clusterState).version(freshVersion) .metadata(Metadata.builder(clusterState.metadata()).coordinationMetadata( CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(freshTerm).build())).build(), clusterState); } } if (randomBoolean() && unimportantPaths.isEmpty() == false) { IOUtils.rm(randomFrom(unimportantPaths)); } // verify that the freshest state is chosen try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths)) { final PersistedClusterStateService.OnDiskState onDiskState = newPersistedClusterStateService(nodeEnvironment) .loadBestOnDiskState(); final ClusterState clusterState = clusterStateFromMetadata(onDiskState.lastAcceptedVersion, onDiskState.metadata); assertThat(clusterState.term(), equalTo(freshTerm)); assertThat(clusterState.version(), equalTo(freshVersion)); } } public void testFailsOnMismatchedNodeIds() throws IOException { final Path[] dataPaths1 = createDataPaths(); final Path[] dataPaths2 = createDataPaths(); final String[] nodeIds = new String[2]; try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths1)) { nodeIds[0] = nodeEnvironment.nodeId(); try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState).version(randomLongBetween(1L, Long.MAX_VALUE)).build()); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths2)) { nodeIds[1] = nodeEnvironment.nodeId(); try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState).version(randomLongBetween(1L, Long.MAX_VALUE)).build()); } } NodeMetadata.FORMAT.cleanupOldFiles(Long.MAX_VALUE, dataPaths2); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); final String failure = expectThrows(IllegalStateException.class, () -> newNodeEnvironment(combinedPaths)).getMessage(); assertThat(failure, allOf(containsString("unexpected node ID in metadata"), containsString(nodeIds[0]), containsString(nodeIds[1]))); assertTrue("[" + failure + "] should match " + 
Arrays.toString(dataPaths2), Arrays.stream(dataPaths2).anyMatch(p -> failure.contains(p.toString()))); // verify that loadBestOnDiskState has same check final String message = expectThrows(IllegalStateException.class, () -> new PersistedClusterStateService(combinedPaths, nodeIds[0], xContentRegistry(), BigArrays.NON_RECYCLING_INSTANCE, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), () -> 0L).loadBestOnDiskState()).getMessage(); assertThat(message, allOf(containsString("unexpected node ID in metadata"), containsString(nodeIds[0]), containsString(nodeIds[1]))); assertTrue("[" + message + "] should match " + Arrays.toString(dataPaths2), Arrays.stream(dataPaths2).anyMatch(p -> message.contains(p.toString()))); } public void testFailsOnMismatchedCommittedClusterUUIDs() throws IOException { final Path[] dataPaths1 = createDataPaths(); final Path[] dataPaths2 = createDataPaths(); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); final String clusterUUID1 = UUIDs.randomBase64UUID(random()); final String clusterUUID2 = UUIDs.randomBase64UUID(random()); // first establish consistent node IDs and write initial metadata try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); assertFalse(clusterState.metadata().clusterUUIDCommitted()); writer.writeFullStateAndCommit(0L, clusterState); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths1)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); assertFalse(clusterState.metadata().clusterUUIDCommitted()); writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(clusterUUID1) .clusterUUIDCommitted(true) .version(1)) .incrementVersion().build()); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths2)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); assertFalse(clusterState.metadata().clusterUUIDCommitted()); writer.writeFullStateAndCommit(0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(clusterUUID2) .clusterUUIDCommitted(true) .version(1)) .incrementVersion().build()); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { final String message = expectThrows(IllegalStateException.class, () -> newPersistedClusterStateService(nodeEnvironment).loadBestOnDiskState()).getMessage(); assertThat(message, allOf(containsString("mismatched cluster UUIDs in metadata"), containsString(clusterUUID1), containsString(clusterUUID2))); assertTrue("[" + message + "] should match " + Arrays.toString(dataPaths1), Arrays.stream(dataPaths1).anyMatch(p -> message.contains(p.toString()))); assertTrue("[" + message + "] should match " + Arrays.toString(dataPaths2), Arrays.stream(dataPaths2).anyMatch(p -> message.contains(p.toString()))); } } public void testFailsIfFreshestStateIsInStaleTerm() throws IOException { final Path[] dataPaths1 = createDataPaths(); final Path[] dataPaths2 
= createDataPaths(); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); final long staleCurrentTerm = randomLongBetween(1L, Long.MAX_VALUE - 1); final long freshCurrentTerm = randomLongBetween(staleCurrentTerm + 1, Long.MAX_VALUE); final long freshTerm = randomLongBetween(1L, Long.MAX_VALUE); final long staleTerm = randomBoolean() ? freshTerm : randomLongBetween(1L, freshTerm); final long freshVersion = randomLongBetween(2L, Long.MAX_VALUE); final long staleVersion = staleTerm == freshTerm ? randomLongBetween(1L, freshVersion - 1) : randomLongBetween(1L, Long.MAX_VALUE); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); assertFalse(clusterState.metadata().clusterUUIDCommitted()); writeState(writer, staleCurrentTerm, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()).version(1) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(staleTerm).build())) .version(staleVersion) .build(), clusterState); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths1)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writeState(writer, freshCurrentTerm, clusterState, clusterState); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(dataPaths2)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final PersistedClusterStateService.OnDiskState onDiskState = newPersistedClusterStateService(nodeEnvironment) .loadBestOnDiskState(); final ClusterState clusterState = clusterStateFromMetadata(onDiskState.lastAcceptedVersion, onDiskState.metadata); writeState(writer, onDiskState.currentTerm, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()).version(2) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(freshTerm).build())) .version(freshVersion) .build(), clusterState); } } try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { final String message = expectThrows(IllegalStateException.class, () -> newPersistedClusterStateService(nodeEnvironment).loadBestOnDiskState()).getMessage(); assertThat(message, allOf( containsString("inconsistent terms found"), containsString(Long.toString(staleCurrentTerm)), containsString(Long.toString(freshCurrentTerm)))); assertTrue("[" + message + "] should match " + Arrays.toString(dataPaths1), Arrays.stream(dataPaths1).anyMatch(p -> message.contains(p.toString()))); assertTrue("[" + message + "] should match " + Arrays.toString(dataPaths2), Arrays.stream(dataPaths2).anyMatch(p -> message.contains(p.toString()))); } } public void testFailsGracefullyOnExceptionDuringFlush() throws IOException { final AtomicBoolean throwException = new AtomicBoolean(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = new PersistedClusterStateService(nodeEnvironment, xContentRegistry(), getBigArrays(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), () -> 0L) { @Override Directory createDirectory(Path 
path) throws IOException { return new FilterDirectory(super.createDirectory(path)) { @Override public IndexOutput createOutput(String name, IOContext context) throws IOException { if (throwException.get()) { throw new IOException("simulated"); } return super.createOutput(name, context); } }; } }; try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); final long newTerm = randomNonNegativeLong(); final ClusterState newState = ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(UUIDs.randomBase64UUID(random())) .clusterUUIDCommitted(true) .version(randomLongBetween(1L, Long.MAX_VALUE))) .incrementVersion().build(); throwException.set(true); assertThat(expectThrows(IOException.class, () -> writeState(writer, newTerm, newState, clusterState)).getMessage(), containsString("simulated")); } } } public void testClosesWriterOnFatalError() throws IOException { final AtomicBoolean throwException = new AtomicBoolean(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = new PersistedClusterStateService(nodeEnvironment, xContentRegistry(), getBigArrays(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), () -> 0L) { @Override Directory createDirectory(Path path) throws IOException { return new FilterDirectory(super.createDirectory(path)) { @Override public void sync(Collection<String> names) { throw new OutOfMemoryError("simulated"); } }; } }; try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); final long newTerm = randomNonNegativeLong(); final ClusterState newState = ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(UUIDs.randomBase64UUID(random())) .clusterUUIDCommitted(true) .version(randomLongBetween(1L, Long.MAX_VALUE))) .incrementVersion().build(); throwException.set(true); assertThat(expectThrows(OutOfMemoryError.class, () -> { if (randomBoolean()) { writeState(writer, newTerm, newState, clusterState); } else { writer.commit(newTerm, newState.version()); } }).getMessage(), containsString("simulated")); assertFalse(writer.isOpen()); } // check if we can open writer again try (Writer ignored = persistedClusterStateService.createWriter()) { } } } public void testCrashesWithIOErrorOnCommitFailure() throws IOException { final AtomicBoolean throwException = new AtomicBoolean(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = new PersistedClusterStateService(nodeEnvironment, xContentRegistry(), getBigArrays(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), () -> 0L) { @Override Directory createDirectory(Path path) throws IOException { return new FilterDirectory(super.createDirectory(path)) { @Override public void rename(String source, String dest) throws IOException { if (throwException.get() && dest.startsWith("segments")) { throw new IOException("simulated"); } } }; } }; try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); final long newTerm = randomNonNegativeLong(); final ClusterState newState = ClusterState.builder(clusterState) 
.metadata(Metadata.builder(clusterState.metadata()) .clusterUUID(UUIDs.randomBase64UUID(random())) .clusterUUIDCommitted(true) .version(randomLongBetween(1L, Long.MAX_VALUE))) .incrementVersion().build(); throwException.set(true); assertThat(expectThrows(IOError.class, () -> { if (randomBoolean()) { writeState(writer, newTerm, newState, clusterState); } else { writer.commit(newTerm, newState.version()); } }).getMessage(), containsString("simulated")); assertFalse(writer.isOpen()); } // check if we can open writer again try (Writer ignored = persistedClusterStateService.createWriter()) { } } } public void testFailsIfGlobalMetadataIsMissing() throws IOException { // if someone attempted surgery on the metadata index by hand, e.g. deleting broken segments, then maybe the global metadata // isn't there any more try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writeState(writer, 0L, ClusterState.builder(clusterState).version(randomLongBetween(1L, Long.MAX_VALUE)).build(), clusterState); } final Path brokenPath = randomFrom(nodeEnvironment.nodeDataPaths()); try (Directory directory = new SimpleFSDirectory(brokenPath.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME))) { final IndexWriterConfig indexWriterConfig = new IndexWriterConfig(); indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE); try (IndexWriter indexWriter = new IndexWriter(directory, indexWriterConfig)) { indexWriter.commit(); } } final String message = expectThrows(IllegalStateException.class, () -> newPersistedClusterStateService(nodeEnvironment).loadBestOnDiskState()).getMessage(); assertThat(message, allOf(containsString("no global metadata found"), containsString(brokenPath.toString()))); } } public void testFailsIfGlobalMetadataIsDuplicated() throws IOException { // if someone attempted surgery on the metadata index by hand, e.g. 
deleting broken segments, then maybe the global metadata // is duplicated final Path[] dataPaths1 = createDataPaths(); final Path[] dataPaths2 = createDataPaths(); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writeState(writer, 0L, ClusterState.builder(clusterState).version(randomLongBetween(1L, Long.MAX_VALUE)).build(), clusterState); } final Path brokenPath = randomFrom(nodeEnvironment.nodeDataPaths()); final Path dupPath = randomValueOtherThan(brokenPath, () -> randomFrom(nodeEnvironment.nodeDataPaths())); try (Directory directory = new SimpleFSDirectory(brokenPath.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME)); Directory dupDirectory = new SimpleFSDirectory(dupPath.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME))) { try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig())) { indexWriter.addIndexes(dupDirectory); indexWriter.commit(); } } final String message = expectThrows(IllegalStateException.class, () -> newPersistedClusterStateService(nodeEnvironment).loadBestOnDiskState()).getMessage(); assertThat(message, allOf(containsString("duplicate global metadata found"), containsString(brokenPath.toString()))); } } public void testFailsIfIndexMetadataIsDuplicated() throws IOException { // if someone attempted surgery on the metadata index by hand, e.g. deleting broken segments, then maybe some index metadata // is duplicated final Path[] dataPaths1 = createDataPaths(); final Path[] dataPaths2 = createDataPaths(); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(combinedPaths)) { final String indexUUID = UUIDs.randomBase64UUID(random()); final String indexName = randomAlphaOfLength(10); try (Writer writer = newPersistedClusterStateService(nodeEnvironment).createWriter()) { final ClusterState clusterState = loadPersistedClusterState(newPersistedClusterStateService(nodeEnvironment)); writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(1L) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(1L).build()) .put(IndexMetadata.builder(indexName) .version(1L) .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, indexUUID)))) .incrementVersion().build(), clusterState); } final Path brokenPath = randomFrom(nodeEnvironment.nodeDataPaths()); final Path dupPath = randomValueOtherThan(brokenPath, () -> randomFrom(nodeEnvironment.nodeDataPaths())); try (Directory directory = new SimpleFSDirectory(brokenPath.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME)); Directory dupDirectory = new SimpleFSDirectory(dupPath.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME))) { try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig())) { indexWriter.deleteDocuments(new Term("type", "global")); // do not duplicate global metadata 
indexWriter.addIndexes(dupDirectory); indexWriter.commit(); } } final String message = expectThrows(IllegalStateException.class, () -> newPersistedClusterStateService(nodeEnvironment).loadBestOnDiskState()).getMessage(); assertThat(message, allOf( containsString("duplicate metadata found"), containsString(brokenPath.toString()), containsString(indexName), containsString(indexUUID))); } } public void testPersistsAndReloadsIndexMetadataIffVersionOrTermChanges() throws IOException { try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final long globalVersion = randomLongBetween(1L, Long.MAX_VALUE); final String indexUUID = UUIDs.randomBase64UUID(random()); final long indexMetadataVersion = randomLongBetween(1L, Long.MAX_VALUE); final long oldTerm = randomLongBetween(1L, Long.MAX_VALUE - 1); final long newTerm = randomLongBetween(oldTerm + 1, Long.MAX_VALUE); try (Writer writer = persistedClusterStateService.createWriter()) { ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(globalVersion) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(oldTerm).build()) .put(IndexMetadata.builder("test") .version(indexMetadataVersion - 1) // -1 because it's incremented in .put() .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, indexUUID)))) .incrementVersion().build(), clusterState); clusterState = loadPersistedClusterState(persistedClusterStateService); IndexMetadata indexMetadata = clusterState.metadata().index("test"); assertThat(indexMetadata.getIndexUUID(), equalTo(indexUUID)); assertThat(indexMetadata.getVersion(), equalTo(indexMetadataVersion)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(indexMetadata.getSettings()), equalTo(0)); // ensure we do not wastefully persist the same index metadata version by making a bad update with the same version writer.writeIncrementalStateAndCommit(0L, clusterState, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .put(IndexMetadata.builder(indexMetadata).settings(Settings.builder() .put(indexMetadata.getSettings()) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1)).build(), false)) .incrementVersion().build()); clusterState = loadPersistedClusterState(persistedClusterStateService); indexMetadata = clusterState.metadata().index("test"); assertThat(indexMetadata.getIndexUUID(), equalTo(indexUUID)); assertThat(indexMetadata.getVersion(), equalTo(indexMetadataVersion)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(indexMetadata.getSettings()), equalTo(0)); // ensure that we do persist the same index metadata version by making an update with a higher version writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .put(IndexMetadata.builder(indexMetadata).settings(Settings.builder() .put(indexMetadata.getSettings()) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2)).build(), true)) .incrementVersion().build(), clusterState); clusterState = 
loadPersistedClusterState(persistedClusterStateService); indexMetadata = clusterState.metadata().index("test"); assertThat(indexMetadata.getVersion(), equalTo(indexMetadataVersion + 1)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(indexMetadata.getSettings()), equalTo(2)); // ensure that we also persist the index metadata when the term changes writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(newTerm).build()) .put(IndexMetadata.builder(indexMetadata).settings(Settings.builder() .put(indexMetadata.getSettings()) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 3)).build(), false)) .incrementVersion().build(), clusterState); } final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); final IndexMetadata indexMetadata = clusterState.metadata().index("test"); assertThat(indexMetadata.getIndexUUID(), equalTo(indexUUID)); assertThat(indexMetadata.getVersion(), equalTo(indexMetadataVersion + 1)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(indexMetadata.getSettings()), equalTo(3)); } } public void testPersistsAndReloadsIndexMetadataForMultipleIndices() throws IOException { try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final long term = randomLongBetween(1L, Long.MAX_VALUE); final String addedIndexUuid = UUIDs.randomBase64UUID(random()); final String updatedIndexUuid = UUIDs.randomBase64UUID(random()); final String deletedIndexUuid = UUIDs.randomBase64UUID(random()); try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(clusterState.metadata().version() + 1) .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata()).term(term).build()) .put(IndexMetadata.builder("updated") .version(randomLongBetween(0L, Long.MAX_VALUE - 1) - 1) // -1 because it's incremented in .put() .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, updatedIndexUuid))) .put(IndexMetadata.builder("deleted") .version(randomLongBetween(0L, Long.MAX_VALUE - 1) - 1) // -1 because it's incremented in .put() .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, deletedIndexUuid)))) .incrementVersion().build(), clusterState); } try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); assertThat(clusterState.metadata().indices().size(), equalTo(2)); assertThat(clusterState.metadata().index("updated").getIndexUUID(), equalTo(updatedIndexUuid)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(clusterState.metadata().index("updated").getSettings()), equalTo(1)); 
assertThat(clusterState.metadata().index("deleted").getIndexUUID(), equalTo(deletedIndexUuid)); writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(clusterState.metadata().version() + 1) .remove("deleted") .put(IndexMetadata.builder("updated") .settings(Settings.builder() .put(clusterState.metadata().index("updated").getSettings()) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2))) .put(IndexMetadata.builder("added") .version(randomLongBetween(0L, Long.MAX_VALUE - 1) - 1) // -1 because it's incremented in .put() .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, addedIndexUuid)))) .incrementVersion().build(), clusterState); } final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); assertThat(clusterState.metadata().indices().size(), equalTo(2)); assertThat(clusterState.metadata().index("updated").getIndexUUID(), equalTo(updatedIndexUuid)); assertThat(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(clusterState.metadata().index("updated").getSettings()), equalTo(2)); assertThat(clusterState.metadata().index("added").getIndexUUID(), equalTo(addedIndexUuid)); assertThat(clusterState.metadata().index("deleted"), nullValue()); } } public void testReloadsMetadataAcrossMultipleSegments() throws IOException { try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final int writes = between(5, 20); final List<Index> indices = new ArrayList<>(writes); try (Writer writer = persistedClusterStateService.createWriter()) { for (int i = 0; i < writes; i++) { final Index index = new Index("test-" + i, UUIDs.randomBase64UUID(random())); indices.add(index); final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); writeState(writer, 0L, ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(i + 2) .put(IndexMetadata.builder(index.getName()) .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID())))) .incrementVersion().build(), clusterState); } } final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); for (Index index : indices) { final IndexMetadata indexMetadata = clusterState.metadata().index(index.getName()); assertThat(indexMetadata.getIndexUUID(), equalTo(index.getUUID())); } } } @TestLogging(value = "org.elasticsearch.gateway:WARN", reason = "to ensure that we log gateway events on WARN level") public void testSlowLogging() throws IOException, IllegalAccessException { final long slowWriteLoggingThresholdMillis; final Settings settings; if (randomBoolean()) { slowWriteLoggingThresholdMillis = PersistedClusterStateService.SLOW_WRITE_LOGGING_THRESHOLD.get(Settings.EMPTY).millis(); settings = Settings.EMPTY; } else { slowWriteLoggingThresholdMillis = randomLongBetween(2, 100000); settings = Settings.builder() .put(PersistedClusterStateService.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), slowWriteLoggingThresholdMillis + "ms") 
.build(); } final DiscoveryNode localNode = new DiscoveryNode("node", buildNewFakeTransportAddress(), Version.CURRENT); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build(); final long startTimeMillis = randomLongBetween(0L, Long.MAX_VALUE - slowWriteLoggingThresholdMillis * 10); final AtomicLong currentTime = new AtomicLong(startTimeMillis); final AtomicLong writeDurationMillis = new AtomicLong(slowWriteLoggingThresholdMillis); final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); try (NodeEnvironment nodeEnvironment = newNodeEnvironment(createDataPaths())) { PersistedClusterStateService persistedClusterStateService = new PersistedClusterStateService(nodeEnvironment, xContentRegistry(), getBigArrays(), clusterSettings, () -> currentTime.getAndAdd(writeDurationMillis.get())); try (Writer writer = persistedClusterStateService.createWriter()) { assertExpectedLogs(1L, null, clusterState, writer, new MockLogAppender.SeenEventExpectation( "should see warning at threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "writing cluster state took [*] which is above the warn threshold of [*]; " + "wrote full state with [0] indices")); writeDurationMillis.set(randomLongBetween(slowWriteLoggingThresholdMillis, slowWriteLoggingThresholdMillis * 2)); assertExpectedLogs(1L, null, clusterState, writer, new MockLogAppender.SeenEventExpectation( "should see warning above threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "writing cluster state took [*] which is above the warn threshold of [*]; " + "wrote full state with [0] indices")); writeDurationMillis.set(randomLongBetween(1, slowWriteLoggingThresholdMillis - 1)); assertExpectedLogs(1L, null, clusterState, writer, new MockLogAppender.UnseenEventExpectation( "should not see warning below threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "*")); clusterSettings.applySettings(Settings.builder() .put(PersistedClusterStateService.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), writeDurationMillis.get() + "ms") .build()); assertExpectedLogs(1L, null, clusterState, writer, new MockLogAppender.SeenEventExpectation( "should see warning at reduced threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "writing cluster state took [*] which is above the warn threshold of [*]; " + "wrote full state with [0] indices")); final ClusterState newClusterState = ClusterState.builder(clusterState) .metadata(Metadata.builder(clusterState.metadata()) .version(clusterState.version()) .put(IndexMetadata.builder("test") .settings(Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_INDEX_UUID, "test-uuid")))) .incrementVersion().build(); assertExpectedLogs(1L, clusterState, newClusterState, writer, new MockLogAppender.SeenEventExpectation( "should see warning at threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "writing cluster state took [*] which is above the warn threshold of [*]; " + "wrote global metadata [false] and metadata for [1] indices and skipped [0] unchanged indices")); writeDurationMillis.set(randomLongBetween(0, writeDurationMillis.get() - 1)); assertExpectedLogs(1L, clusterState, 
newClusterState, writer, new MockLogAppender.UnseenEventExpectation( "should not see warning below threshold", PersistedClusterStateService.class.getCanonicalName(), Level.WARN, "*")); assertThat(currentTime.get(), lessThan(startTimeMillis + 14 * slowWriteLoggingThresholdMillis)); // ensure no overflow } } } private void assertExpectedLogs(long currentTerm, ClusterState previousState, ClusterState clusterState, PersistedClusterStateService.Writer writer, MockLogAppender.LoggingExpectation expectation) throws IllegalAccessException, IOException { MockLogAppender mockAppender = new MockLogAppender(); mockAppender.start(); mockAppender.addExpectation(expectation); Logger classLogger = LogManager.getLogger(PersistedClusterStateService.class); Loggers.addAppender(classLogger, mockAppender); try { if (previousState == null) { writer.writeFullStateAndCommit(currentTerm, clusterState); } else { writer.writeIncrementalStateAndCommit(currentTerm, previousState, clusterState); } } finally { Loggers.removeAppender(classLogger, mockAppender); mockAppender.stop(); } mockAppender.assertAllExpectationsMatched(); } @Override public Settings buildEnvSettings(Settings settings) { assertTrue(settings.hasValue(Environment.PATH_DATA_SETTING.getKey())); return Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()).build(); } public static Path[] createDataPaths() { final Path[] dataPaths = new Path[randomIntBetween(1, 4)]; for (int i = 0; i < dataPaths.length; i++) { dataPaths[i] = createTempDir(); } return dataPaths; } private NodeEnvironment newNodeEnvironment(Path[] dataPaths) throws IOException { return newNodeEnvironment(Settings.builder() .putList(Environment.PATH_DATA_SETTING.getKey(), Arrays.stream(dataPaths).map(Path::toString).collect(Collectors.toList())) .build()); } private static ClusterState loadPersistedClusterState(PersistedClusterStateService persistedClusterStateService) throws IOException { final PersistedClusterStateService.OnDiskState onDiskState = persistedClusterStateService.loadBestOnDiskState(); return clusterStateFromMetadata(onDiskState.lastAcceptedVersion, onDiskState.metadata); } private static ClusterState clusterStateFromMetadata(long version, Metadata metadata) { return ClusterState.builder(ClusterName.DEFAULT).version(version).metadata(metadata).build(); } private static BigArrays getBigArrays() { return usually() ? BigArrays.NON_RECYCLING_INSTANCE : new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); } }
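/*
 * [Editorial sketch, not part of the original test class] Every test above relies on the
 * same write/reload round trip; this helper distills it for reference. It assumes the same
 * imports and setup as the surrounding tests; `service` is a configured
 * PersistedClusterStateService, exactly as produced by newPersistedClusterStateService(...).
 */
class PersistedStateRoundTripSketch {

    /** Writes the given state at the given term, then reloads the best state found on disk. */
    static ClusterState writeThenReload(PersistedClusterStateService service, long term, ClusterState clusterState)
        throws IOException {
        try (PersistedClusterStateService.Writer writer = service.createWriter()) {
            writer.writeFullStateAndCommit(term, clusterState); // full snapshot, as in writeState(...)
        }
        final PersistedClusterStateService.OnDiskState onDiskState = service.loadBestOnDiskState();
        return ClusterState.builder(ClusterName.DEFAULT)
            .version(onDiskState.lastAcceptedVersion)
            .metadata(onDiskState.metadata)
            .build();
    }
}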
package com.cmart.PageControllers;

import static org.junit.Assert.assertTrue;

import java.util.Date;

import javax.servlet.http.HttpServletRequest;

import com.cmart.Data.Error;
import com.cmart.Data.GlobalErrors;
import com.cmart.Data.GlobalVars;
import com.cmart.util.CheckInputs;
import com.cmart.util.Item;
import com.cmart.util.Question;

public class AnswerQuestionController extends PageController{
    // Variables passed in the request
    private long userID = -1;
    private String authToken = null;
    private long itemID;
    private String answer = null;
    private long questionID;

    // Structures to hold the DB data
    private Question question = null;
    private Question answerDB = null;
    private Item item = null;

    // Structures to hold the parsed page data
    private String redirectURL = null;

    /**
     * This method checks the page for any input errors that may have come from the Client generator.
     * These would need to be checked in real life to stop users attempting to hack and mess with things
     *
     * @param request
     * @author Andy ([email protected], [email protected])
     */
    public void checkInputs(HttpServletRequest request){
        super.startTimer();

        if(request != null){
            super.checkInputs(request);

            // Get the userID (if exists), we will pass it along to the next pages
            try{
                this.userID = CheckInputs.checkUserID(request);
            } catch(Error e){
            }

            // Get the authToken (if exists), we will pass it along to the next pages
            try{
                this.authToken = CheckInputs.checkAuthToken(request);
            } catch(Error e){
            }

            // Get the itemID
            try{
                this.itemID = CheckInputs.checkItemID(request);
            } catch(Error e){
                if(!errors.contains(e)) errors.add(e);
                this.itemID = -1;
            }

            // Get the questionID (if exists), we will pass it along to the next pages
            try{
                this.questionID = CheckInputs.checkQuestionID(request);
            } catch(Error e){
                if(!errors.contains(e)) errors.add(e);
                this.questionID = -1;
            }

            // Get the answer
            try{
                this.answer = CheckInputs.checkAnswer(request);
            } catch(Error e){
                if(!errors.contains(e)) errors.add(e);
                this.answer = "";
            }
        }

        // Calculate how long that took
        super.stopTimerAddParam();
    }

    /**
     * This method gets the data needed for the HTML4 page from the database
     *
     * @author Andy ([email protected], [email protected])
     */
    public void getHTML4Data() {
        super.startTimer();

        // Get the question from the database
        if(this.question == null && this.questionID > 0){
            try{
                this.question = GlobalVars.DB.getQuestion(questionID);
            } catch(Exception e){
                e.printStackTrace();
            }
        }

        // Get the item from the database
        if(this.item == null && this.itemID > 0){
            try{
                this.item = GlobalVars.DB.getItem(itemID, false);
            } catch(Exception e){
                e.printStackTrace();
            }
        }

        // Calculate how long that took
        super.stopTimerAddProcessing();
    }

    /**
     * This method processes all of the data that was read from the database such that it is ready to be printed
     * on to the page. We try to do as much of the page logic here as possible
     *
     * @author Andy ([email protected], [email protected])
     */
    public void processHTML4() {
        super.startTimer();

        // Calculate how long that took
        super.stopTimerAddProcessing();
    }

    /**
     * Gets the HTML5 data from the database
     *
     * @author Andy ([email protected], [email protected])
     */
    public void getHTML5Data(){
        super.startTimer();

        // Get the question from the database
        if(this.question == null && this.questionID > 0){
            try{
                this.question = GlobalVars.DB.getQuestion(questionID);
            } catch(Exception e){
                e.printStackTrace();
            }
        }

        // Calculate how long that took
        super.stopTimerAddDB();
    }

    /**
     * Processes the HTML5 data that is needed to create the page
     *
     * @author Andy ([email protected], [email protected])
     */
    public void processHTML5(){
        super.startTimer();
        super.stopTimerAddProcessing();
    }

    /**
     * Attempts to submit the answer to the database. Adds errors to the Error list if there are problems
     *
     * @return
     * @author Andy ([email protected], [email protected]) Bo ([email protected])
     */
    public boolean submitAnswer(){
        // Only if we think the answer might get accepted
        if(this.errors.size() == 0){
            if(GlobalVars.DB.checkAuthToken(this.userID, this.authToken)){
                if(this.answer == null){
                    if(!errors.contains(GlobalErrors.answerNotPresent)) errors.add(GlobalErrors.answerNotPresent);
                }
                // We need to hit the database and get the question's details for the other checks
                else if(this.question == null && this.questionID > 0){
                    try{
                        this.question = GlobalVars.DB.getQuestion(questionID);
                    } catch(Exception e){
                        e.printStackTrace();
                    }
                }

                // Check that the question is really there
                if(this.question == null){
                    if(!errors.contains(GlobalErrors.answerInvalidItem)) errors.add(GlobalErrors.answerInvalidItem);
                }
                // Check that the item is still running
                /*
                if(this.isOld || item.getEndDate().before(Calendar.getInstance().getTime())){
                    if(!errors.contains(GlobalErrors.answerOnFinishedAuction)) errors.add(GlobalErrors.answerOnFinishedAuction);
                }
                */
                // Everything okay, so let the DB insert
                else if(this.errors.size() == 0){
                    Date date = new Date(System.currentTimeMillis());
                    long qID = GlobalVars.DB.insertAnswer(question.getFromUserID(), userID, itemID, question.getID(), date, answer);
                    this.answerDB = new Question(qID, userID, this.question.getFromUserID(), itemID, false, this.questionID, date, answer);
                    createRedirectURL();
                    return true;
                }
            }
            else if(!errors.contains(GlobalErrors.incorrectAuthToken))
                errors.add(GlobalErrors.incorrectAuthToken);
        }

        return false;
    }

    /**
     * If we successfully insert the answer then we'll need to forward the user on to the item they were viewing.
     * We'll create the URL here
     *
     * @author Andy ([email protected], [email protected]) Bo ([email protected])
     */
    private void createRedirectURL(){
        //try{
            this.redirectURL = "./viewitem?userID=" + this.userID + "&authToken=" + this.authToken + "&itemID=" + this.itemID;
            //this.redirectURL = URLEncoder.encode(this.redirectURL, "UTF-8");
            // String.replace returns a new string, so the result must be assigned back
            this.redirectURL = this.redirectURL.replace(" ", "%20");
        //}
        //catch(UnsupportedEncodingException e){
        //    System.err.println("Encode error");
        //}
    }

    /**
     * Returns the current userID as a String
     *
     * @return String the userID
     * @author Andy ([email protected], [email protected])
     */
    public String getUserIDString(){
        return Long.toString(this.userID);
    }

    /**
     * Returns the authToken sent to the page
     *
     * @return string the authToken
     * @author Andy ([email protected], [email protected])
     */
    public String getAuthTokenString(){
        return this.authToken;
    }

    /**
     * Returns the URL to be redirected to if the user successfully comments
     *
     * @return String the next URL to redirect to
     * @author Andy ([email protected], [email protected]) Bo ([email protected])
     */
    public String getRedirectURL(){
        return this.redirectURL;
    }

    public Question getQuestion(){
        return this.question;
    }

    public Question getAnswerDB(){
        return this.answerDB;
    }

    public Item getItem(){
        return this.item;
    }

    /**
     * This method is called to setup and run tests using the class's private variables
     */
    @Sequenic.T2.T2annotation.exclude
    @org.junit.Test
    public void assertTests(){
        // Get a user to test with
        long user1ID = GlobalVars.DB.checkUsernamePassword("contest1", "password1");
        if(user1ID < 0){
            GlobalVars.DB.insertUser("contest1", "password1", "[email protected]", "user1", "1");
            user1ID = GlobalVars.DB.checkUsernamePassword("contest1", "password1");
        }
        String authToken = GlobalVars.DB.makeNewAuthToken(user1ID);

        // Get question with null/invalid everything
        this.getHTML4Data();
        assertTrue("The question should still be null", this.question == null);
        this.getHTML5Data();
        assertTrue("The question should still be null", this.question == null);

        // Insert answer with bad everything
        assertTrue("Should not be able to insert answer", this.submitAnswer() == false);

        // Get question 1
        this.questionID = 1L;
        this.question = null;
        this.getHTML4Data();
        assertTrue("The question should not be null (make sure question id=1 exists)", this.question != null);
        this.question = null;
        this.getHTML5Data();
        Question q1 = this.question;
        assertTrue("The question should not be null (make sure question id=1 exists)", this.question != null);

        // Get an invalid question
        this.questionID = 9999999999L;
        this.question = null;
        this.getHTML4Data();
        assertTrue("The question should still be null (make sure question id=9999999999 does not exist)", this.question == null);
        this.question = null;
        this.getHTML5Data();
        assertTrue("The question should still be null (make sure question id=9999999999 does not exist)", this.question == null);

        // Try to submit an answer with an invalid user id and auth token
        this.errors.clear();
        assertTrue("Should not be able to insert answer with bad user id and auth", this.submitAnswer() == false);
        assertTrue("The bad auth token error should occur", this.errors.contains(GlobalErrors.incorrectAuthToken));

        // Try to submit with good userID, but bad auth token
        this.userID = user1ID;
        this.errors.clear();
        assertTrue("Should not be able to insert answer with good user, bad auth", this.submitAnswer() == false);
        assertTrue("The bad auth token error should occur", this.errors.contains(GlobalErrors.incorrectAuthToken));

        // Try to submit with no user ID, but good auth token
        this.userID = -1;
        this.authToken = authToken;
        this.errors.clear();
        assertTrue("Should not be able to insert answer with bad user id, good auth", this.submitAnswer() == false);
        assertTrue("The bad auth token error should occur", this.errors.contains(GlobalErrors.incorrectAuthToken));

        // Try to submit answer with no question ID, but username/auth good
        this.userID = user1ID;
        this.authToken = authToken;
        this.questionID = -1;
        this.question = null;
        this.errors.clear();
        assertTrue("Should not be able to insert answer with no question", this.submitAnswer() == false);
        assertTrue("The invalid answer error should occur", this.errors.contains(GlobalErrors.answerInvalidItem));

        // Try to insert answer with null answer
        this.userID = user1ID;
        this.authToken = authToken;
        this.questionID = 1;
        this.question = null;
        this.answer = null;
        this.errors.clear();
        assertTrue("Should not be able to insert answer with null answer and good question id", this.submitAnswer() == false);
        assertTrue("The invalid answer error should occur", this.errors.contains(GlobalErrors.answerNotPresent));

        // Try to insert answer with null answer and a preloaded question object
        this.userID = user1ID;
        this.authToken = authToken;
        this.questionID = -1;
        this.question = q1;
        this.answer = null;
        this.errors.clear();
        assertTrue("Should not be able to insert answer with null answer and good question obj", this.submitAnswer() == false);
        assertTrue("The invalid answer error should occur", this.errors.contains(GlobalErrors.answerNotPresent));

        // Try to insert an answer with good question id and answer
        this.userID = user1ID;
        this.authToken = authToken;
        this.questionID = 1;
        this.question = null;
        this.answer = "good";
        this.errors.clear();
        assertTrue("Should have inserted answer", this.submitAnswer() == true);
        assertTrue("No errors should occur", this.errors.isEmpty());

        // Try to insert an answer with good question and answer
        this.userID = user1ID;
        this.authToken = authToken;
        this.questionID = -1;
        this.question = q1;
        this.answer = "good";
        this.errors.clear();
        assertTrue("Should have inserted answer", this.submitAnswer() == true);
        assertTrue("No errors should occur", this.errors.isEmpty());
    }
}
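/*
 * [Editorial sketch, not part of the original source] How a servlet would typically drive
 * this controller, following the lifecycle the methods above describe: validate inputs,
 * load and process the page data, submit, then redirect on success. The servlet class and
 * its response handling are hypothetical; only the controller calls come from the code above.
 */
class AnswerQuestionServletSketch {

    void handlePost(HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws java.io.IOException {
        AnswerQuestionController controller = new AnswerQuestionController();
        controller.checkInputs(request);   // parse userID/authToken/itemID/questionID/answer
        controller.getHTML4Data();         // fetch the question and item (needed to re-render on failure)
        controller.processHTML4();

        if (controller.submitAnswer()) {
            // success: forward to the item page built by createRedirectURL()
            response.sendRedirect(controller.getRedirectURL());
        }
        // on failure the controller's error list drives the re-rendered form
    }
}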
//============================================================================ // // Copyright (C) 2006-2022 Talend Inc. - www.talend.com // // This source code is available under agreement available at // %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt // // You should have received a copy of the agreement // along with this program; if not, write to Talend SA // 9 rue Pages 92150 Suresnes, France // //============================================================================ package org.talend.components.netsuite.client; import static org.talend.components.netsuite.NetSuiteDatasetRuntimeImpl.getCustomFieldValueClass; import static org.talend.components.netsuite.client.model.beans.Beans.getSimpleProperty; import static org.talend.components.netsuite.client.model.beans.Beans.toInitialUpper; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.talend.components.netsuite.client.model.BasicRecordType; import org.talend.components.netsuite.client.model.CustomFieldDesc; import org.talend.components.netsuite.client.model.CustomRecordTypeInfo; import org.talend.components.netsuite.client.model.CustomTransactionTypeInfo; import org.talend.components.netsuite.client.model.RecordTypeDesc; import org.talend.components.netsuite.client.model.RecordTypeInfo; import org.talend.components.netsuite.client.model.RefType; import org.talend.components.netsuite.client.model.customfield.CustomFieldRefType; import org.talend.daikon.java8.Function; /** * Implementation of <code>CustomMetaDataSource</code> which retrieves custom meta data from NetSuite and * caches retrieved data. 
 */
public class DefaultCustomMetaDataSource<PortT> implements CustomMetaDataSource {

    protected transient final Logger logger = LoggerFactory.getLogger(getClass());

    protected NetSuiteClientService<PortT> clientService;

    protected Map<String, RecordTypeInfo> customRecordTypeMap = new HashMap<>();

    protected boolean customRecordTypesLoaded = false;

    protected Map<BasicRecordType, List<?>> customFieldMap = new HashMap<>();

    protected Map<String, Map<String, CustomFieldDesc>> recordCustomFieldMap = new HashMap<>();

    protected boolean customFieldsLoaded = false;

    protected Map<String, Map<String, CustomFieldDesc>> customRecordCustomFieldMap = new HashMap<>();

    protected CustomMetaDataRetriever customMetaDataRetriever;

    protected static final List<BasicRecordType> fieldCustomizationTypes = Collections.unmodifiableList(
            Arrays.asList(BasicRecordType.CRM_CUSTOM_FIELD, BasicRecordType.ENTITY_CUSTOM_FIELD,
                    BasicRecordType.ITEM_CUSTOM_FIELD, BasicRecordType.OTHER_CUSTOM_FIELD,
                    BasicRecordType.TRANSACTION_BODY_CUSTOM_FIELD, BasicRecordType.TRANSACTION_COLUMN_CUSTOM_FIELD));

    public DefaultCustomMetaDataSource(NetSuiteClientService<PortT> clientService,
            CustomMetaDataRetriever customMetaDataRetriever) {
        this.clientService = clientService;
        this.customMetaDataRetriever = customMetaDataRetriever;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Collection<RecordTypeInfo> getCustomRecordTypes() {
        return clientService.executeWithLock(new Function<Void, Collection<RecordTypeInfo>>() {

            @Override
            public Collection<RecordTypeInfo> apply(Void param) {
                retrieveCustomRecordTypes();
                return new ArrayList<>(customRecordTypeMap.values());
            }
        }, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, CustomFieldDesc> getCustomFields(RecordTypeInfo recordTypeInfo) {
        return clientService.executeWithLock(new Function<RecordTypeInfo, Map<String, CustomFieldDesc>>() {

            @Override
            public Map<String, CustomFieldDesc> apply(RecordTypeInfo recordTypeInfo) {
                return getCustomFieldsImpl(recordTypeInfo);
            }
        }, recordTypeInfo);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RecordTypeInfo getCustomRecordType(String typeName) {
        return clientService.executeWithLock(new Function<String, RecordTypeInfo>() {

            @Override
            public RecordTypeInfo apply(String typeName) {
                retrieveCustomRecordTypes();
                return customRecordTypeMap.get(typeName);
            }
        }, typeName);
    }

    /**
     * Get custom field descriptors for a given record type.
     *
     * @param recordTypeInfo record type info
     * @return custom field descriptors as map
     * @throws NetSuiteException if an error occurs while obtaining customization data
     */
    protected Map<String, CustomFieldDesc> getCustomFieldsImpl(RecordTypeInfo recordTypeInfo) throws NetSuiteException {
        RecordTypeDesc recordType = recordTypeInfo.getRecordType();
        Map<String, CustomFieldDesc> fieldDescMap;
        if (recordTypeInfo instanceof CustomRecordTypeInfo) {
            fieldDescMap = customRecordCustomFieldMap.get(recordTypeInfo.getName());
            if (fieldDescMap == null) {
                retrieveCustomRecordCustomFields((CustomRecordTypeInfo) recordTypeInfo);
                fieldDescMap = customRecordCustomFieldMap.get(recordTypeInfo.getName());
            }
        } else {
            fieldDescMap = recordCustomFieldMap.get(recordType.getType());
            if (fieldDescMap == null) {
                retrieveCustomFields(recordType);
                fieldDescMap = recordCustomFieldMap.get(recordType.getType());
            }
        }
        return fieldDescMap;
    }

    /**
     * Create custom field descriptors.
* * @param recordType record type * @param customizationType customization type * @param customFieldList list of native NetSuite objects describing custom fields * @param <T> type of custom field data objects * @return custom field descriptors as map * @throws NetSuiteException if an error occurs during obtaining of customization data */ public static <T> Map<String, CustomFieldDesc> createCustomFieldDescMap(NetSuiteClientService<?> clientService, RecordTypeDesc recordType, BasicRecordType customizationType, List<T> customFieldList) throws NetSuiteException { Map<String, CustomFieldDesc> customFieldDescMap = new HashMap<>(); for (T customField : customFieldList) { CustomFieldRefType customFieldRefType = clientService.getBasicMetaData() .getCustomFieldRefType(recordType.getType(), customizationType, customField); if (customFieldRefType != null) { CustomFieldDesc customFieldDesc = new CustomFieldDesc(); String internalId = (String) getSimpleProperty(customField, "internalId"); String scriptId = (String) getSimpleProperty(customField, "scriptId"); String label = (String) getSimpleProperty(customField, "label"); NsRef customizationRef = new NsRef(); customizationRef.setRefType(RefType.CUSTOMIZATION_REF); customizationRef.setType(customizationType.getType()); customizationRef.setName(label); customizationRef.setInternalId(internalId); customizationRef.setScriptId(scriptId); customFieldDesc.setCustomizationRef(customizationRef); customFieldDesc.setName(customizationRef.getScriptId()); customFieldDesc.setCustomFieldType(customFieldRefType); customFieldDesc.setValueType(getCustomFieldValueClass(customFieldRefType)); customFieldDesc.setNullable(true); customFieldDescMap.put(customFieldDesc.getName(), customFieldDesc); } } return customFieldDescMap; } /** * Retrieve custom record types from NetSuite web service. * * @see #customRecordTypeMap * * @throws NetSuiteException if an error occurs during retrieving of customization data */ protected void retrieveCustomRecordTypes() throws NetSuiteException { if (customRecordTypesLoaded) { return; } List<NsRef> customTypes = new ArrayList<>(); List<NsRef> customRecordTypes = customMetaDataRetriever.retrieveCustomizationIds(BasicRecordType.CUSTOM_RECORD_TYPE); customTypes.addAll(customRecordTypes); List<NsRef> customTransactionTypes = customMetaDataRetriever.retrieveCustomizationIds(BasicRecordType.CUSTOM_TRANSACTION_TYPE); customTypes.addAll(customTransactionTypes); for (NsRef customizationRef : customTypes) { String recordType = customizationRef.getType(); RecordTypeDesc recordTypeDesc = null; BasicRecordType basicRecordType = BasicRecordType.getByType(recordType); if (basicRecordType != null) { recordTypeDesc = clientService.getBasicMetaData() .getRecordType(toInitialUpper(basicRecordType.getSearchType())); } RecordTypeInfo customTypeInfo = basicRecordType != BasicRecordType.CUSTOM_TRANSACTION_TYPE ? new CustomRecordTypeInfo(customizationRef.getScriptId(), recordTypeDesc, customizationRef) : new CustomTransactionTypeInfo(customizationRef.getScriptId(), recordTypeDesc); customRecordTypeMap.put(customTypeInfo.getName(), customTypeInfo); } customRecordTypesLoaded = true; } /** * Retrieve custom fields for a given record type. 
* * @param recordType record type * @throws NetSuiteException if an error occurs during retrieving of customization data */ protected void retrieveCustomFields(RecordTypeDesc recordType) throws NetSuiteException { retrieveCustomFields(); Map<String, CustomFieldDesc> fieldDescMap = new HashMap<>(); for (BasicRecordType customizationType : fieldCustomizationTypes) { List<?> customFieldList = customFieldMap.get(customizationType); Map<String, CustomFieldDesc> customFieldDescMap = createCustomFieldDescMap(clientService, recordType, customizationType, customFieldList); fieldDescMap.putAll(customFieldDescMap); } recordCustomFieldMap.put(recordType.getType(), fieldDescMap); } /** * Retrieve custom fields for standard record types from NetSuite web service. * * @throws NetSuiteException if an error occurs during retrieving of customization data */ protected void retrieveCustomFields() throws NetSuiteException { if (customFieldsLoaded) { return; } Map<BasicRecordType, List<NsRef>> fieldCustomizationRefs = new HashMap<>(32); for (BasicRecordType customizationType : fieldCustomizationTypes) { List<NsRef> customizationRefs = customMetaDataRetriever.retrieveCustomizationIds(customizationType); fieldCustomizationRefs.put(customizationType, customizationRefs); } for (BasicRecordType customizationType : fieldCustomizationTypes) { List<NsRef> customizationRefs = fieldCustomizationRefs.get(customizationType); List<?> fieldCustomizationList = customMetaDataRetriever.retrieveCustomizations(customizationRefs); customFieldMap.put(customizationType, fieldCustomizationList); } customFieldsLoaded = true; } /** * Retrieve custom fields for a given custom record type. * * @param recordTypeInfo custom record type * @throws NetSuiteException if an error occurs during retrieving of customization data */ protected void retrieveCustomRecordCustomFields(CustomRecordTypeInfo recordTypeInfo) throws NetSuiteException { Map<String, CustomFieldDesc> recordCustomFieldMap = customRecordCustomFieldMap.get(recordTypeInfo.getName()); if (recordCustomFieldMap != null) { return; } recordCustomFieldMap = customMetaDataRetriever.retrieveCustomRecordCustomFields( recordTypeInfo.getRecordType(), recordTypeInfo.getCustomizationRef()); customRecordCustomFieldMap.put(recordTypeInfo.getName(), recordCustomFieldMap); } public interface CustomMetaDataRetriever { /** * Retrieve customization IDs for given customization type. * * @param type customization type * @return list of customization refs * @throws NetSuiteException if an error occurs during retrieving */ List<NsRef> retrieveCustomizationIds(final BasicRecordType type) throws NetSuiteException; /** * Retrieve customization for given customization refs. * * @param nsCustomizationRefs customization refs which to retrieve customization data for * @return list of customization records * @throws NetSuiteException if an error occurs during retrieving */ List<?> retrieveCustomizations(final List<NsRef> nsCustomizationRefs) throws NetSuiteException; /** * Retrieve custom fields for given custom record type. * * @param recordType custom record type descriptor * @param nsCustomizationRef customization ref for the custom record type * @return custom field map which contains <code>(custom field name, custom field descriptor)</code> entries * @throws NetSuiteException if an error occurs during retrieving */ Map<String, CustomFieldDesc> retrieveCustomRecordCustomFields(final RecordTypeDesc recordType, final NsRef nsCustomizationRef) throws NetSuiteException; } }
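/*
 * [Editorial sketch, not part of the original source] A minimal CustomMetaDataRetriever
 * stub showing how DefaultCustomMetaDataSource is wired up; handy in tests or when an
 * account has no customizations. `clientService` is assumed to be an already-configured
 * NetSuiteClientService instance.
 */
class EmptyCustomMetaDataRetriever implements DefaultCustomMetaDataSource.CustomMetaDataRetriever {

    @Override
    public List<NsRef> retrieveCustomizationIds(BasicRecordType type) throws NetSuiteException {
        return Collections.emptyList(); // report no custom record types or custom transaction types
    }

    @Override
    public List<?> retrieveCustomizations(List<NsRef> nsCustomizationRefs) throws NetSuiteException {
        return Collections.emptyList(); // no custom field customizations to fetch
    }

    @Override
    public Map<String, CustomFieldDesc> retrieveCustomRecordCustomFields(RecordTypeDesc recordType,
            NsRef nsCustomizationRef) throws NetSuiteException {
        return Collections.emptyMap(); // custom record types carry no custom fields here
    }
}

// Wiring it up (assuming a configured clientService):
// CustomMetaDataSource source = new DefaultCustomMetaDataSource<>(clientService, new EmptyCustomMetaDataRetriever());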
/* * Copyright 2018 LinkedIn Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package com.github.ambry.replication; import com.codahale.metrics.MetricRegistry; import com.github.ambry.clustermap.ClusterMap; import com.github.ambry.clustermap.ClusterMapChangeListener; import com.github.ambry.clustermap.DataNodeId; import com.github.ambry.clustermap.MockPartitionId; import com.github.ambry.clustermap.PartitionId; import com.github.ambry.clustermap.ReplicaEventType; import com.github.ambry.clustermap.ReplicaId; import com.github.ambry.commons.BlobId; import com.github.ambry.commons.BlobIdFactory; import com.github.ambry.messageformat.BlobProperties; import com.github.ambry.messageformat.BlobType; import com.github.ambry.messageformat.DeleteMessageFormatInputStream; import com.github.ambry.messageformat.MessageFormatException; import com.github.ambry.messageformat.MessageFormatInputStream; import com.github.ambry.messageformat.MessageFormatRecord; import com.github.ambry.messageformat.MetadataContentSerDe; import com.github.ambry.messageformat.PutMessageFormatBlobV1InputStream; import com.github.ambry.messageformat.PutMessageFormatInputStream; import com.github.ambry.store.Message; import com.github.ambry.store.MessageInfo; import com.github.ambry.store.MockStoreKeyConverterFactory; import com.github.ambry.store.StoreKey; import com.github.ambry.store.StoreKeyConverter; import com.github.ambry.store.TransformationOutput; import com.github.ambry.utils.ByteBufferInputStream; import com.github.ambry.utils.Pair; import com.github.ambry.utils.TestUtils; import com.github.ambry.utils.Utils; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import org.json.JSONObject; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import static org.junit.Assert.*; /** * Tests the BlobIdTransformer */ @RunWith(Parameterized.class) public class BlobIdTransformerTest { private final ClusterMap clusterMap = new MockReadingClusterMap(); private final BlobIdFactory blobIdFactory = new BlobIdFactory(clusterMap); private final BlobIdTransformer transformer; private final List<Pair> pairList; private final MockStoreKeyConverterFactory factory; private final short metadataContentVersion; private static final int BLOB_STREAM_SIZE = 128; private static final int BLOB_ENCRYPTION_KEY_SIZE = 32; private static final int USER_META_DATA_SIZE = 64; private static final int COMPOSITE_BLOB_SIZE = 8000000; private static final int COMPOSITE_BLOB_DATA_CHUNK_SIZE = 4000000; public static final Pair<String, String> BLOB_ID_PAIR_VERSION_1_CONVERTED = new Pair<>("AAEAAQAAAAAAAAAhAAAAJDkwNTUwOTJhLTc3ZTAtNDI4NC1iY2IxLTc2MDZlYTAzNWM4OQ", "AAMB_wE5AAIAAQAAAAAAAAAhAAAAJDkwNTUwOTJhLTc3ZTAtNDI4NC1iY2IxLTc2MDZlYTAzNWM4OQ"); public static final Pair<String, String> 
BLOB_ID_PAIR_VERSION_2_CONVERTED = new Pair<>("AAIAAQB8AAIAAQAAAAAAAAAbAAAAJDRiYTE0YzFkLTFjNmUtNDYyNC04ZDcyLTU3ZDQzZjgzOWM4OQ", "AAMBAQB8AAIAAQAAAAAAAAAbAAAAJDRiYTE0YzFkLTFjNmUtNDYyNC04ZDcyLTU3ZDQzZjgzOWM4OQ"); public static final Pair<String, String> BLOB_ID_PAIR_VERSION_3_CONVERTED = new Pair<>("AAMAAgCgAAMAAQAAAAAAAACEAAAAJDYwMmQ0ZGQxLTQ5NDUtNDg0YS05MmQwLTI5YjVkM2ZlOWM4OQ", "AAMBAgCgAAIAAQAAAAAAAACEAAAAJDYwMmQ0ZGQxLTQ5NDUtNDg0YS05MmQwLTI5YjVkM2ZlOWM4OQ"); public static final Pair<String, String> BLOB_ID_PAIR_VERSION_3_NULL = new Pair<>("AAMAAAAAAAAAAAAAAAAAAAAAAAAAJDNlM2U1YzY0LTgxMWItNDVlZi04N2QzLTgyZmZmOWRmNTIxOA", null); public static final String VERSION_1_UNCONVERTED = "AAEAAQAAAAAAAABZAAAAJGYwMjRiYzIyLTA4NDMtNGNjMC1iMzNiLTUyOGZmZTA4NWM4OQ"; public static final String VERSION_3_UNCONVERTED = "AAMAAAAAAAAAAAAAAAAAAAAAAAAAJDUyYTk2OWIyLTA3YWMtNDBhMC05ZmY2LTUxY2ZkZjY4NWM4OQ"; //All these Metadata related pairs have the same //account ID: 313 and the same container ID: 2 public static final Pair<String, String> BLOB_ID_VERSION_1_METADATA_CONVERTED = new Pair<>("AAEAAQAAAAAAAABSAAAAJGQ2YjM0YzI2LWU0MjMtNGNkNC1iMGZhLTU5Yzc2YmVhZjk2ZA", "AAMB_wE5AAIAAQAAAAAAAABSAAAAJGQ2YjM0YzI2LWU0MjMtNGNkNC1iMGZhLTU5Yzc2YmVhZjk2ZA"); public static final Pair<String, String> BLOB_ID_VERSION_1_METADATA_UNCONVERTED = new Pair<>("AAEAAQAAAAAAAABiAAAAJGVlM2YzYjFkLTA4NDEtNGZmMS04MGVmLTU4MWM4ZWIwNjkzOQ", "AAEAAQAAAAAAAABiAAAAJGVlM2YzYjFkLTA4NDEtNGZmMS04MGVmLTU4MWM4ZWIwNjkzOQ"); public static final Pair<String, String> BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED = new Pair<>("AAEAAQAAAAAAAABlAAAAJDJjNzhmYTYxLTlhZDQtNDg2YS1iZTZkLWFlMGE0ODNjNTI2YQ", "AAMB_wE5AAIAAQAAAAAAAABlAAAAJDJjNzhmYTYxLTlhZDQtNDg2YS1iZTZkLWFlMGE0ODNjNTI2YQ"); public static final Pair<String, String> BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED = new Pair<>("AAEAAQAAAAAAAAAkAAAAJGQyZmYxMDE5LTBmMDQtNDEwNi05NDBjLWY5ZTgwYTU2ZmY1YQ", "AAMB_wE5AAIAAQAAAAAAAAAkAAAAJGQyZmYxMDE5LTBmMDQtNDEwNi05NDBjLWY5ZTgwYTU2ZmY1YQ"); public static final Pair<String, String> BLOB_ID_VERSION_1_DATACHUNK_1_UNCONVERTED = new Pair<>("AAEAAQAAAAAAAAAHAAAAJGIxZmYwYmE5LTMwYTAtNDY0OC05MzUyLWZjYWViY2M4YTgzMQ", "AAEAAQAAAAAAAAAHAAAAJGIxZmYwYmE5LTMwYTAtNDY0OC05MzUyLWZjYWViY2M4YTgzMQ"); private static final Class[] VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS = new Class[]{PutMessageFormatInputStream.class, PutMessageFormatBlobV1InputStream.class}; /** * Running for both regular and encrypted blobs, and versions 2 and 3 of MetadataContent * @return an array with all four different choices */ @Parameterized.Parameters public static List<Object[]> data() { return Arrays.asList(new Object[][]{{MessageFormatRecord.Metadata_Content_Version_V2}, {MessageFormatRecord.Metadata_Content_Version_V3}}); } /** * Sets up common components * @throws IOException */ public BlobIdTransformerTest(int metadataContentVersion) throws Exception { this.metadataContentVersion = (short) metadataContentVersion; Pair<String, String>[] pairs = new Pair[]{BLOB_ID_PAIR_VERSION_1_CONVERTED, BLOB_ID_PAIR_VERSION_2_CONVERTED, BLOB_ID_PAIR_VERSION_3_CONVERTED, BLOB_ID_PAIR_VERSION_3_NULL, BLOB_ID_VERSION_1_METADATA_CONVERTED, BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED, BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED, BLOB_ID_VERSION_1_DATACHUNK_1_UNCONVERTED, BLOB_ID_VERSION_1_METADATA_UNCONVERTED}; factory = new MockStoreKeyConverterFactory(null, null); factory.setReturnInputIfAbsent(true); StoreKeyConverter storeKeyConverter = createAndSetupMockStoreKeyConverter(factory, pairs); transformer = new BlobIdTransformer(blobIdFactory, 
                storeKeyConverter);
        pairList = new ArrayList<>(Arrays.asList(pairs));
        pairList.add(new Pair<>(VERSION_3_UNCONVERTED, VERSION_3_UNCONVERTED));
        preConvertPairFirsts(pairList, storeKeyConverter);
    }

    /**
     * Tests basic use of the transformer with blobs that can be converted and those that can't
     * @throws Exception
     */
    @Test
    public void testBasicOperation() throws Exception {
        for (Pair pair : pairList) {
            for (Class clazz : VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS) {
                for (boolean divergeInfoFromData : new boolean[]{false, true}) {
                    InputAndExpected inputAndExpected = new InputAndExpected(pair, clazz, divergeInfoFromData);
                    TransformationOutput output = transformer.transform(inputAndExpected.getInput());
                    assertNull("output exception should be null", output.getException());
                    verifyOutput(output.getMsg(), inputAndExpected.getExpected());
                }
            }
        }
    }

    /**
     * Tests metadata blob transformation
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(BLOB_ID_VERSION_1_METADATA_CONVERTED,
                VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false,
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getFirst(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED.getFirst()},
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getSecond(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED.getSecond()});
        TransformationOutput output = transformer.transform(inputAndExpected.getInput());
        assertNull("output exception should be null", output.getException());
        verifyOutput(output.getMsg(), inputAndExpected.getExpected());
    }

    /**
     * Tests that metadata blobs with a bad blob property size
     * get corrected (blob size == composite datachunk total size) in transformation
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testBrokenSizeMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(BLOB_ID_VERSION_1_METADATA_CONVERTED,
                VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false, true,
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getFirst(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED.getFirst()},
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getSecond(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_CONVERTED.getSecond()});
        TransformationOutput output = transformer.transform(inputAndExpected.getInput());
        assertNull("output exception should be null", output.getException());
        verifyOutput(output.getMsg(), inputAndExpected.getExpected());
    }

    /**
     * Tests that the correct exception is returned when transformation is attempted
     * on a metadata chunk with a deprecated data chunk
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testBrokenDeprecatedMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(pairList.get(0), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0],
                false, new String[]{BLOB_ID_PAIR_VERSION_3_NULL.getFirst(), BLOB_ID_PAIR_VERSION_3_CONVERTED.getFirst()}, null);
        assertException(transformer.transform(inputAndExpected.getInput()), IllegalStateException.class);
    }

    /**
     * Tests that the correct exception is returned when transformation is attempted
     * on a changed metadata chunk with an unchanged data chunk
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testBrokenUnchangedMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(BLOB_ID_VERSION_1_METADATA_CONVERTED,
                VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false,
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getFirst(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_UNCONVERTED.getFirst()}, null);
        assertException(transformer.transform(inputAndExpected.getInput()), IllegalStateException.class);
    }

    /**
     * Tests that the correct exception is returned when transformation is attempted
     * on an unchanged metadata chunk with a changed data chunk
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testBrokenChangedMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(BLOB_ID_VERSION_1_METADATA_UNCONVERTED,
                VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false,
                new String[]{BLOB_ID_VERSION_1_DATACHUNK_0_CONVERTED.getFirst(),
                        BLOB_ID_VERSION_1_DATACHUNK_1_UNCONVERTED.getFirst()}, null);
        assertException(transformer.transform(inputAndExpected.getInput()), IllegalStateException.class);
    }

    /**
     * Tests that the correct exception is returned when transformation is attempted
     * on a changed metadata chunk whose datachunks have a different account ID / container ID
     * @throws IOException
     * @throws MessageFormatException
     */
    @Test
    public void testBrokenDifferentAccountIdContainerIdMetaDataBlobOperation() throws IOException, MessageFormatException {
        InputAndExpected inputAndExpected = new InputAndExpected(pairList.get(0), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0],
                false, new String[]{BLOB_ID_PAIR_VERSION_2_CONVERTED.getFirst(), BLOB_ID_PAIR_VERSION_3_CONVERTED.getFirst()},
                null);
        assertException(transformer.transform(inputAndExpected.getInput()), IllegalStateException.class);
    }

    /**
     * Tests a non-put message input to the transformer
     * @throws Exception
     */
    @Test
    public void testNonPutTransform() throws Exception {
        InputAndExpected inputAndExpected = new InputAndExpected(pairList.get(0), DeleteMessageFormatInputStream.class, false);
        assertException(transformer.transform(inputAndExpected.getInput()), IllegalArgumentException.class);
    }

    /**
     * Tests putting garbage input in the message inputStream into the transformer
     * @throws Exception
     */
    @Test
    public void testGarbageInputStream() throws Exception {
        InputAndExpected inputAndExpected = new InputAndExpected(pairList.get(0), null, false);
        assertException(transformer.transform(inputAndExpected.getInput()), MessageFormatException.class);
    }

    /**
     * Tests the transformer when the underlying StoreKeyConverter isn't working
     * @throws Exception
     */
    @Test
    public void testBrokenStoreKeyConverter() throws Exception {
        InputAndExpected inputAndExpected =
                new InputAndExpected(pairList.get(0), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false);
        TransformationOutput output = transformer.transform(inputAndExpected.getInput());
        verifyOutput(output.getMsg(), inputAndExpected.getExpected());

        factory.setException(new BlobIdTransformerTestException());
        inputAndExpected = new InputAndExpected(pairList.get(1), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false);
        output = transformer.transform(inputAndExpected.getInput());
        Assert.assertTrue("Should lead to IllegalStateException", output.getException() instanceof IllegalStateException);

        factory.setException(null);
        inputAndExpected = new InputAndExpected(pairList.get(2), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], false);
        output = transformer.transform(inputAndExpected.getInput());
        verifyOutput(output.getMsg(), inputAndExpected.getExpected());
    }

    /**
     * Tests creating the transformer with a null StoreKeyConverter
     */
    @Test
    public void testNullStoreKeyConverter() throws IOException {
        try {
            new
BlobIdTransformer(blobIdFactory, null); fail("Did not throw NullPointerException"); } catch (NullPointerException e) { //expected } } /** * Tests creating the transformer with a null StoreKeyFactory */ @Test public void testNullStoreKeyFactory() throws IOException { try { new BlobIdTransformer(null, factory.getStoreKeyConverter()); fail("Did not throw NullPointerException"); } catch (NullPointerException e) { //expected } } /** * Tests BlobIdTransformer's warmup() method * @throws Exception */ @Test public void testWarmup() throws Exception { InputAndExpected inputAndExpected = new InputAndExpected(pairList.get(0), VALID_MESSAGE_FORMAT_INPUT_STREAM_IMPLS[0], true); BlobIdTransformer transformer = new BlobIdTransformer(blobIdFactory, factory.getStoreKeyConverter()); TransformationOutput output = transformer.transform(inputAndExpected.getInput()); Assert.assertTrue("Should lead to IllegalStateException", output.getException() instanceof IllegalStateException); transformer.warmup(Collections.singletonList(inputAndExpected.getInput().getMessageInfo())); output = transformer.transform(inputAndExpected.getInput()); assertNull(output.getException()); verifyOutput(output.getMsg(), inputAndExpected.getExpected()); } /** * Tests using the transformer with null input to the transform method * @throws Exception */ @Test public void testNullTransformInput() throws Exception { assertException(transformer.transform(null), NullPointerException.class); } /** * Tests using the transformer with Message inputs that have null components * @throws Exception */ @Test public void testNullComponentsTransformInput() throws Exception { MessageInfo messageInfo = new MessageInfo(createBlobId(VERSION_1_UNCONVERTED), 123, (short) 123, (short) 123, 0L); //null msgBytes Message message = new Message(messageInfo, null); assertException(transformer.transform(message), NullPointerException.class); //null messageInfo message = new Message(null, new ByteArrayInputStream(new byte[30])); assertException(transformer.transform(message), NullPointerException.class); } private BlobId createBlobId(String hexBlobId) throws IOException { if (hexBlobId == null) { return null; } return new BlobId(hexBlobId, clusterMap); } private StoreKeyConverter createAndSetupMockStoreKeyConverter(MockStoreKeyConverterFactory factory, Pair<String, String>[] pairs) throws Exception { Map<StoreKey, StoreKey> map = new HashMap<>(); for (Pair<String, String> pair : pairs) { map.put(createBlobId(pair.getFirst()), createBlobId(pair.getSecond())); } factory.setConversionMap(map); return factory.getStoreKeyConverter(); } /** * Runs all the {@link Pair#getFirst()} outputs from the {@link Pair} list through * the {@link StoreKeyConverter} storeKeyConverter. * Intended to be run so that the StoreKeyConverter's * {@link StoreKeyConverter#getConverted(StoreKey)} method can * work on any of the pairs' getFirst() outputs. 
* @param pairs * @param storeKeyConverter * @throws Exception {@link StoreKeyConverter#convert(Collection)} may throw an Exception */ private void preConvertPairFirsts(List<Pair> pairs, StoreKeyConverter storeKeyConverter) throws Exception { List<StoreKey> pairFirsts = new ArrayList<>(); for (Pair<String, String> pair : pairs) { pairFirsts.add(createBlobId(pair.getFirst())); } storeKeyConverter.convert(pairFirsts); } private void verifyOutput(Message output, Message expected) throws IOException { if (expected == null) { assertNull("output should be null", output); } else { assertEquals("MessageInfos not equal", expected.getMessageInfo(), output.getMessageInfo()); TestUtils.assertInputStreamEqual(expected.getStream(), output.getStream(), (int) expected.getMessageInfo().getSize(), true); } } private void assertException(TransformationOutput transformationOutput, Class exceptionClass) { assertNull("Message in output is not null", transformationOutput.getMsg()); assertTrue("Exception from output is not " + exceptionClass.getName(), exceptionClass.isInstance(transformationOutput.getException())); } /** * Creates a random Message input and a related expected Message output */ private class InputAndExpected { private final Message input; private final Message expected; private final long randomStaticSeed = new Random().nextLong(); private Random buildRandom = new Random(randomStaticSeed); /** * Constructs the input and expected * @param pair the pair of blob ids (old, new) * @param clazz the put message input stream class to use * @param divergeInfoFromData if {@code true}, changes some fields in the info to be different from what is in the * data * @throws IOException * @throws MessageFormatException */ InputAndExpected(Pair<String, String> pair, Class clazz, boolean divergeInfoFromData) throws IOException, MessageFormatException { this(pair, clazz, divergeInfoFromData, null, null); } InputAndExpected(Pair<String, String> pair, Class clazz, boolean divergeInfoFromData, String[] dataChunkIdsInput, String[] dataChunkIdsExpected) throws IOException, MessageFormatException { this(pair, clazz, divergeInfoFromData, false, dataChunkIdsInput, dataChunkIdsExpected); } InputAndExpected(Pair<String, String> pair, Class clazz, boolean divergeInfoFromData, boolean brokenMetadataChunk, String[] dataChunkIdsInput, String[] dataChunkIdsExpected) throws IOException, MessageFormatException { boolean hasEncryption = clazz == PutMessageFormatInputStream.class; Long crcInput = buildRandom.nextLong(); input = buildMessage(pair.getFirst(), clazz, hasEncryption, crcInput, divergeInfoFromData, brokenMetadataChunk, dataChunkIdsInput); if (pair.getSecond() == null) { //can't just assign 'input' since Message has an //InputStream that is modified when read expected = null;//buildMessage(pair.getFirst(), PutMessageFormatInputStream.class, hasEncryption); } else { Long crcExpected = pair.getSecond().equals(pair.getFirst()) ? 
crcInput : null; expected = buildMessage(pair.getSecond(), PutMessageFormatInputStream.class, hasEncryption, crcExpected, divergeInfoFromData, false, dataChunkIdsExpected); } } public Message getInput() { return input; } public Message getExpected() { return expected; } private byte[] randomByteArray(int size) { byte[] bytes = new byte[size]; buildRandom.nextBytes(bytes); return bytes; } private ByteBuffer randomByteBuffer(int size) { return ByteBuffer.wrap(randomByteArray(size)); } private Message buildMessage(String blobIdString, Class clazz, boolean hasEncryption, Long crcInMsgInfo, boolean divergeInfoFromData, boolean brokenMetadataChunk, String... dataChunkIds) throws IOException, MessageFormatException { buildRandom = new Random(randomStaticSeed); //If there are datachunks, it's a metadata blob. //If not, its a data blob InputStream blobStream; long blobStreamSize; long blobPropertiesSize; ByteBuffer byteBuffer; BlobType blobType; if (dataChunkIds == null) { blobStreamSize = BLOB_STREAM_SIZE; blobPropertiesSize = blobStreamSize; blobStream = createBlobStream(); blobType = BlobType.DataBlob; } else { byteBuffer = createMetadataByteBuffer(dataChunkIds); blobStreamSize = byteBuffer.remaining(); if (brokenMetadataChunk) { blobPropertiesSize = blobStreamSize; } else { blobPropertiesSize = COMPOSITE_BLOB_SIZE; } blobStream = new ByteBufferInputStream(byteBuffer); blobType = BlobType.MetadataBlob; } BlobId blobId = createBlobId(blobIdString); ByteBuffer blobEncryptionKey = randomByteBuffer(BLOB_ENCRYPTION_KEY_SIZE); if (!hasEncryption) { blobEncryptionKey = null; } ByteBuffer userMetaData = randomByteBuffer(USER_META_DATA_SIZE); InputStream inputStream; int inputStreamSize; MessageInfo messageInfo; BlobProperties blobProperties = new BlobProperties(blobPropertiesSize, "serviceId", "ownerId", "contentType", false, 0, 0, blobId.getAccountId(), blobId.getContainerId(), hasEncryption, null, "gzip", "filename"); if (clazz != null) { MessageFormatInputStream messageFormatInputStream; if (clazz == PutMessageFormatInputStream.class) { messageFormatInputStream = new PutMessageFormatInputStream(blobId, blobEncryptionKey, blobProperties, userMetaData, blobStream, blobStreamSize, blobType); } else if (clazz == DeleteMessageFormatInputStream.class) { messageFormatInputStream = new DeleteMessageFormatInputStream(blobId, blobId.getAccountId(), blobId.getContainerId(), 0); } else {//if (clazz == PutMessageFormatBlobV1InputStream.class) { messageFormatInputStream = new PutMessageFormatBlobV1InputStream(blobId, blobProperties, userMetaData, blobStream, blobStreamSize, blobType); } inputStreamSize = (int) messageFormatInputStream.getSize(); inputStream = messageFormatInputStream; } else { inputStream = new ByteArrayInputStream(randomByteArray(100)); inputStreamSize = 100; } boolean ttlUpdated = false; long expiryTimeMs = Utils.addSecondsToEpochTime(blobProperties.getCreationTimeInMs(), blobProperties.getTimeToLiveInSeconds()); if (divergeInfoFromData) { ttlUpdated = true; expiryTimeMs = Utils.Infinite_Time; } messageInfo = new MessageInfo(blobId, inputStreamSize, false, ttlUpdated, expiryTimeMs, crcInMsgInfo, blobId.getAccountId(), blobId.getContainerId(), blobProperties.getCreationTimeInMs()); return new Message(messageInfo, inputStream); } private InputStream createBlobStream() { return new ByteArrayInputStream(randomByteArray(BLOB_STREAM_SIZE)); } /** * Creates metadata blob data buffer from supplied datachunkIds * @param datachunkIds * @return * @throws IOException */ private ByteBuffer 
    private ByteBuffer createMetadataByteBuffer(String... datachunkIds) throws IOException {
      List<StoreKey> storeKeys = new ArrayList<>();
      for (String datachunkId : datachunkIds) {
        storeKeys.add(blobIdFactory.getStoreKey(datachunkId));
      }
      ByteBuffer output;
      switch (metadataContentVersion) {
        case MessageFormatRecord.Metadata_Content_Version_V2:
          output = MetadataContentSerDe.serializeMetadataContentV2(COMPOSITE_BLOB_DATA_CHUNK_SIZE, COMPOSITE_BLOB_SIZE,
              storeKeys);
          break;
        case MessageFormatRecord.Metadata_Content_Version_V3:
          // Split the composite blob into full-size chunks plus one trailing partial chunk.
          int totalLeft = COMPOSITE_BLOB_SIZE;
          List<Pair<StoreKey, Long>> keyAndSizeList = new ArrayList<>();
          int i = 0;
          while (totalLeft >= COMPOSITE_BLOB_DATA_CHUNK_SIZE) {
            keyAndSizeList.add(new Pair<>(storeKeys.get(i++), (long) COMPOSITE_BLOB_DATA_CHUNK_SIZE));
            totalLeft -= COMPOSITE_BLOB_DATA_CHUNK_SIZE;
          }
          if (totalLeft > 0) {
            keyAndSizeList.add(new Pair<>(storeKeys.get(i), (long) totalLeft));
          }
          output = MetadataContentSerDe.serializeMetadataContentV3(COMPOSITE_BLOB_SIZE, keyAndSizeList);
          break;
        default:
          throw new IllegalStateException("Unexpected metadata content version: " + metadataContentVersion);
      }
      output.flip();
      return output;
    }
  }

  private class BlobIdTransformerTestException extends Exception {
  }

  /**
   * Mock ClusterMap that actually reads the InputStream passed to getPartitionIdFromStream and constructs a
   * MockPartitionId from the bytes read.
   */
  private class MockReadingClusterMap implements ClusterMap {
    private boolean throwException = false;

    public MockReadingClusterMap() {
    }

    public PartitionId getPartitionIdFromStream(InputStream inputStream) throws IOException {
      if (this.throwException) {
        throw new IOException();
      } else {
        // consume the partition id bytes (2-byte version + 8-byte id) from the stream
        byte[] bytes = new byte[10];
        inputStream.read(bytes);
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        bb.getShort();
        long num = bb.getLong();
        return new MockPartitionId(num, (String) null);
      }
    }

    @Override
    public PartitionId getPartitionIdByName(String partitionIdStr) {
      return null;
    }

    public List<? extends PartitionId> getWritablePartitionIds(String partitionClass) {
      return null;
    }

    public PartitionId getRandomWritablePartition(String partitionClass, List<PartitionId> partitionsToExclude) {
      return null;
    }

    public List<? extends PartitionId> getAllPartitionIds(String partitionClass) {
      return null;
    }

    public boolean hasDatacenter(String s) {
      return false;
    }

    public byte getLocalDatacenterId() {
      return 0;
    }

    public String getDatacenterName(byte b) {
      return null;
    }

    public DataNodeId getDataNodeId(String s, int i) {
      return null;
    }

    public List<? extends ReplicaId> getReplicaIds(DataNodeId dataNodeId) {
      return null;
    }

    public List<? extends DataNodeId> getDataNodeIds() {
      return null;
    }

    public MetricRegistry getMetricRegistry() {
      return null;
    }

    public void onReplicaEvent(ReplicaId replicaId, ReplicaEventType replicaEventType) {
    }

    @Override
    public JSONObject getSnapshot() {
      return null;
    }

    @Override
    public ReplicaId getBootstrapReplica(String partitionIdStr, DataNodeId dataNodeId) {
      return null;
    }

    @Override
    public void registerClusterMapListener(ClusterMapChangeListener clusterMapChangeListener) {
    }

    public void close() {
    }
  }
}
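/*
 * A minimal standalone sketch (not part of the test above; the class and method names
 * are hypothetical) isolating the arithmetic that the V3 branch of
 * createMetadataByteBuffer() uses: a composite blob of totalSize bytes is described as
 * a run of full chunkSize chunks followed by one trailing partial chunk.
 */
import java.util.ArrayList;
import java.util.List;

class CompositeChunkSizeSketch {
  /** Returns the per-chunk sizes: full chunks first, then the remainder (if any). */
  static List<Long> chunkSizes(long totalSize, long chunkSize) {
    List<Long> sizes = new ArrayList<>();
    long totalLeft = totalSize;
    while (totalLeft >= chunkSize) {
      sizes.add(chunkSize);
      totalLeft -= chunkSize;
    }
    if (totalLeft > 0) {
      sizes.add(totalLeft); // the trailing partial chunk, as in the V3 branch above
    }
    return sizes;
  }

  public static void main(String[] args) {
    // A 10-byte composite blob with 4-byte chunks is described as [4, 4, 2].
    System.out.println(chunkSizes(10, 4));
  }
}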
/* ****************************************************************************** * Copyright (c) 2015-2018 Skymind, Inc. * Copyright (c) 2019 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.deeplearning4j.ui; import org.apache.commons.io.IOUtils; import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.StatsStorage; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.layers.DenseLayer; import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.conf.layers.variational.GaussianReconstructionDistribution; import org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder; import org.deeplearning4j.nn.conf.serde.JsonMappers; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; import org.deeplearning4j.optimize.listeners.ScoreIterationListener; import org.deeplearning4j.ui.api.UIServer; import org.deeplearning4j.ui.stats.StatsListener; import org.deeplearning4j.ui.storage.InMemoryStatsStorage; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.nd4j.linalg.activations.Activation; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.function.Function; import org.nd4j.linalg.learning.config.Sgd; import org.nd4j.linalg.lossfunctions.LossFunctions; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; import static org.junit.Assert.*; /** * Created by Alex on 08/10/2016. 
*/ @Ignore public class TestVertxUI extends BaseDL4JTest { @Before public void setUp() throws Exception { UIServer.stopInstance(); } @Test @Ignore public void testUI() throws Exception { StatsStorage ss = new InMemoryStatsStorage(); VertxUIServer uiServer = (VertxUIServer) UIServer.getInstance(); assertEquals(9000, uiServer.getPort()); uiServer.stop(); VertxUIServer vertxUIServer = new VertxUIServer(); // vertxUIServer.runMain(new String[] {"--uiPort", "9100", "-r", "true"}); // // assertEquals(9100, vertxUIServer.getPort()); // vertxUIServer.stop(); // uiServer.attach(ss); // // MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() // .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) // .list() // .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build()) // .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(4).nOut(3).build()) // .build(); // // MultiLayerNetwork net = new MultiLayerNetwork(conf); // net.init(); // net.setListeners(new StatsListener(ss, 3), new ScoreIterationListener(1)); // // DataSetIterator iter = new IrisDataSetIterator(150, 150); // // for (int i = 0; i < 500; i++) { // net.fit(iter); //// Thread.sleep(100); // Thread.sleep(100); // } // //// uiServer.stop(); Thread.sleep(100000); } @Test @Ignore public void testUI_VAE() throws Exception { //Variational autoencoder - for unsupervised layerwise pretraining StatsStorage ss = new InMemoryStatsStorage(); UIServer uiServer = UIServer.getInstance(); uiServer.attach(ss); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .updater(new Sgd(1e-5)) .list().layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(10, 11) .decoderLayerSizes(12, 13).weightInit(WeightInit.XAVIER) .pzxActivationFunction(Activation.IDENTITY) .reconstructionDistribution( new GaussianReconstructionDistribution()) .activation(Activation.LEAKYRELU).build()) .layer(1, new VariationalAutoencoder.Builder().nIn(3).nOut(3).encoderLayerSizes(7) .decoderLayerSizes(8).weightInit(WeightInit.XAVIER) .pzxActivationFunction(Activation.IDENTITY) .reconstructionDistribution(new GaussianReconstructionDistribution()) .activation(Activation.LEAKYRELU).build()) .layer(2, new OutputLayer.Builder().nIn(3).nOut(3).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.setListeners(new StatsListener(ss), new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); for (int i = 0; i < 50; i++) { net.fit(iter); Thread.sleep(100); } Thread.sleep(100000); } @Test @Ignore public void testUIMultipleSessions() throws Exception { for (int session = 0; session < 3; session++) { StatsStorage ss = new InMemoryStatsStorage(); UIServer uiServer = UIServer.getInstance(); uiServer.attach(ss); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list() .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build()) .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) .activation(Activation.SOFTMAX).nIn(4).nOut(3).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.setListeners(new StatsListener(ss, 1), new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); for (int i = 0; i < 20; i++) { 
net.fit(iter); Thread.sleep(100); } } Thread.sleep(1000000); } @Test @Ignore public void testUISequentialSessions() throws Exception { UIServer uiServer = UIServer.getInstance(); StatsStorage ss = null; for (int session = 0; session < 3; session++) { if (ss != null) { uiServer.detach(ss); } ss = new InMemoryStatsStorage(); uiServer.attach(ss); int numInputs = 4; int outputNum = 3; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .activation(Activation.TANH) .weightInit(WeightInit.XAVIER) .updater(new Sgd(0.03)) .l2(1e-4) .list() .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3) .build()) .layer(1, new DenseLayer.Builder().nIn(3).nOut(3) .build()) .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .activation(Activation.SOFTMAX) .nIn(3).nOut(outputNum).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.setListeners(new StatsListener(ss), new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); for (int i = 0; i < 1000; i++) { net.fit(iter); } Thread.sleep(5000); } Thread.sleep(1000000); } @Test @Ignore public void testUICompGraph() throws Exception { StatsStorage ss = new InMemoryStatsStorage(); UIServer uiServer = UIServer.getInstance(); uiServer.attach(ss); ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in") .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(), "in") .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0") .setOutputs("L1").build(); ComputationGraph net = new ComputationGraph(conf); net.init(); net.setListeners(new StatsListener(ss), new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); for (int i = 0; i < 100; i++) { net.fit(iter); Thread.sleep(100); } Thread.sleep(1000000); } @Test public void testAutoAttach() throws Exception { ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in") .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(), "in") .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0") .setOutputs("L1").build(); ComputationGraph net = new ComputationGraph(conf); net.init(); StatsStorage ss1 = new InMemoryStatsStorage(); net.setListeners(new StatsListener(ss1, 1, "ss1")); DataSetIterator iter = new IrisDataSetIterator(150, 150); for (int i = 0; i < 5; i++) { net.fit(iter); } StatsStorage ss2 = new InMemoryStatsStorage(); net.setListeners(new StatsListener(ss2, 1, "ss2")); for (int i = 0; i < 4; i++) { net.fit(iter); } UIServer ui = UIServer.getInstance(true, null); try { ((VertxUIServer) ui).autoAttachStatsStorageBySessionId(new Function<String, StatsStorage>() { @Override public StatsStorage apply(String s) { if ("ss1".equals(s)) { return ss1; } else if ("ss2".equals(s)) { return ss2; } return null; } }); String json1 = IOUtils.toString(new URL("http://localhost:9000/train/ss1/overview/data"), StandardCharsets.UTF_8); // System.out.println(json1); String json2 = IOUtils.toString(new URL("http://localhost:9000/train/ss2/overview/data"), StandardCharsets.UTF_8); // System.out.println(json2); assertNotEquals(json1, json2); Map<String, Object> m1 = JsonMappers.getMapper().readValue(json1, Map.class); Map<String, Object> m2 = 
JsonMappers.getMapper().readValue(json2, Map.class); List<Object> s1 = (List<Object>) m1.get("scores"); List<Object> s2 = (List<Object>) m2.get("scores"); assertEquals(5, s1.size()); assertEquals(4, s2.size()); } finally { ui.stop(); } } @Test public void testUIAttachDetach() throws Exception { StatsStorage ss = new InMemoryStatsStorage(); UIServer uiServer = UIServer.getInstance(); uiServer.attach(ss); assertFalse(uiServer.getStatsStorageInstances().isEmpty()); uiServer.detach(ss); assertTrue(uiServer.getStatsStorageInstances().isEmpty()); } @Test public void testUIServerStop() { UIServer uiServer = UIServer.getInstance(true, null); assertTrue(uiServer.isMultiSession()); uiServer.stop(); uiServer = UIServer.getInstance(false, null); assertFalse(uiServer.isMultiSession()); } }
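/*
 * A condensed usage sketch (the class name is hypothetical; every API call below
 * already appears in TestVertxUI above) of the UIServer lifecycle the tests exercise:
 * create an in-memory stats storage, attach it to the server, stream training stats
 * into it through a StatsListener, then detach and stop.
 */
package org.deeplearning4j.ui;

import org.deeplearning4j.api.storage.StatsStorage;
import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.ui.api.UIServer;
import org.deeplearning4j.ui.stats.StatsListener;
import org.deeplearning4j.ui.storage.InMemoryStatsStorage;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class UIServerUsageSketch {
    public static void main(String[] args) throws Exception {
        StatsStorage ss = new InMemoryStatsStorage();
        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss); // the UI (http://localhost:9000 in the tests above) now sees this storage

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new StatsListener(ss)); // publish stats into the attached storage

        DataSetIterator iter = new IrisDataSetIterator(150, 150);
        for (int i = 0; i < 20; i++) {
            net.fit(iter);
        }

        uiServer.detach(ss); // storage can be re-attached later, as in testUISequentialSessions
        uiServer.stop();
    }
}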
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.util.http; import java.io.Serializable; import java.text.DateFormat; import java.text.FieldPosition; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import org.apache.tomcat.util.buf.MessageBytes; /** * Server-side cookie representation. * Allows recycling and uses MessageBytes as low-level * representation ( and thus the byte-> char conversion can be delayed * until we know the charset ). * * Tomcat.core uses this recyclable object to represent cookies, * and the facade will convert it to the external representation. */ public class ServerCookie implements Serializable { private static final long serialVersionUID = 1L; // Version 0 (Netscape) attributes private final MessageBytes name=MessageBytes.newInstance(); private final MessageBytes value=MessageBytes.newInstance(); // Expires - Not stored explicitly. Generated from Max-Age (see V1) private final MessageBytes path=MessageBytes.newInstance(); private final MessageBytes domain=MessageBytes.newInstance(); private boolean secure; // Version 1 (RFC2109) attributes private final MessageBytes comment=MessageBytes.newInstance(); private int maxAge = -1; private int version = 0; // Other fields private static final String OLD_COOKIE_PATTERN = "EEE, dd-MMM-yyyy HH:mm:ss z"; private static final ThreadLocal<DateFormat> OLD_COOKIE_FORMAT = new ThreadLocal<DateFormat>() { @Override protected DateFormat initialValue() { DateFormat df = new SimpleDateFormat(OLD_COOKIE_PATTERN, Locale.US); df.setTimeZone(TimeZone.getTimeZone("GMT")); return df; } }; private static final String ancientDate; static { ancientDate = OLD_COOKIE_FORMAT.get().format(new Date(10000)); } // Note: Servlet Spec =< 3.0 only refers to Netscape and RFC2109, // not RFC2965 // Version 2 (RFC2965) attributes that would need to be added to support // v2 cookies // CommentURL // Discard - implied by maxAge <0 // Port public ServerCookie() { // NOOP } public void recycle() { name.recycle(); value.recycle(); comment.recycle(); maxAge=-1; path.recycle(); domain.recycle(); version=0; secure=false; } public MessageBytes getComment() { return comment; } public MessageBytes getDomain() { return domain; } public void setMaxAge(int expiry) { maxAge = expiry; } public int getMaxAge() { return maxAge; } public MessageBytes getPath() { return path; } public void setSecure(boolean flag) { secure = flag; } public boolean getSecure() { return secure; } public MessageBytes getName() { return name; } public MessageBytes getValue() { return value; } public int getVersion() { return version; } public void setVersion(int v) { version = v; } // -------------------- utils -------------------- @Override public String toString() { return "Cookie " + 
getName() + "=" + getValue() + " ; " + getVersion() + " " + getPath() + " " + getDomain(); } // -------------------- Cookie parsing tools public static void appendCookieValue( StringBuffer headerBuf, int version, String name, String value, String path, String domain, String comment, int maxAge, boolean isSecure, boolean isHttpOnly) { StringBuffer buf = new StringBuffer(); // Servlet implementation checks name buf.append( name ); buf.append("="); // Servlet implementation does not check anything else /* * The spec allows some latitude on when to send the version attribute * with a Set-Cookie header. To be nice to clients, we'll make sure the * version attribute is first. That means checking the various things * that can cause us to switch to a v1 cookie first. * * Note that by checking for tokens we will also throw an exception if a * control character is encountered. */ // Start by using the version we were asked for int newVersion = version; // If it is v0, check if we need to switch if (newVersion == 0 && (!CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isHttpToken(value) || CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isV0Token(value))) { // HTTP token in value - need to use v1 newVersion = 1; } if (newVersion == 0 && comment != null) { // Using a comment makes it a v1 cookie newVersion = 1; } if (newVersion == 0 && (!CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isHttpToken(path) || CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isV0Token(path))) { // HTTP token in path - need to use v1 newVersion = 1; } if (newVersion == 0 && (!CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isHttpToken(domain) || CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 && CookieSupport.isV0Token(domain))) { // HTTP token in domain - need to use v1 newVersion = 1; } // Now build the cookie header // Value maybeQuote(buf, value); // Add version 1 specific information if (newVersion == 1) { // Version=1 ... required buf.append ("; Version=1"); // Comment=comment if ( comment!=null ) { buf.append ("; Comment="); maybeQuote(buf, comment); } } // Add domain information, if present if (domain!=null) { buf.append("; Domain="); maybeQuote(buf, domain); } // Max-Age=secs ... or use old "Expires" format if (maxAge >= 0) { if (newVersion > 0) { buf.append ("; Max-Age="); buf.append (maxAge); } // IE6, IE7 and possibly other browsers don't understand Max-Age. // They do understand Expires, even with V1 cookies! if (newVersion == 0 || CookieSupport.ALWAYS_ADD_EXPIRES) { // Wdy, DD-Mon-YY HH:MM:SS GMT ( Expires Netscape format ) buf.append ("; Expires="); // To expire immediately we need to set the time in past if (maxAge == 0) { buf.append( ancientDate ); } else { OLD_COOKIE_FORMAT.get().format( new Date(System.currentTimeMillis() + maxAge*1000L), buf, new FieldPosition(0)); } } } // Path=path if (path!=null) { buf.append ("; Path="); maybeQuote(buf, path); } // Secure if (isSecure) { buf.append ("; Secure"); } // HttpOnly if (isHttpOnly) { buf.append("; HttpOnly"); } headerBuf.append(buf); } /** * Quotes values if required. 
     * @param buf the buffer to append the value to
     * @param value the value to append, quoted if required
     */
    private static void maybeQuote(StringBuffer buf, String value) {
        if (value == null || value.length() == 0) {
            buf.append("\"\"");
        } else if (CookieSupport.alreadyQuoted(value)) {
            buf.append('"');
            buf.append(escapeDoubleQuotes(value, 1, value.length() - 1));
            buf.append('"');
        } else if (CookieSupport.isHttpToken(value) &&
                !CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0 ||
                CookieSupport.isV0Token(value) &&
                CookieSupport.ALLOW_HTTP_SEPARATORS_IN_V0) {
            buf.append('"');
            buf.append(escapeDoubleQuotes(value, 0, value.length()));
            buf.append('"');
        } else {
            buf.append(value);
        }
    }

    /**
     * Escapes any double quotes in the given string.
     *
     * @param s the input string
     * @param beginIndex start index inclusive
     * @param endIndex end index exclusive
     * @return The (possibly) escaped string
     */
    private static String escapeDoubleQuotes(String s, int beginIndex, int endIndex) {
        if (s == null || s.length() == 0 || s.indexOf('"') == -1) {
            return s;
        }
        StringBuffer b = new StringBuffer();
        for (int i = beginIndex; i < endIndex; i++) {
            char c = s.charAt(i);
            if (c == '\\') {
                b.append(c);
                // append the character following the escape verbatim; an escape
                // with nothing after it is malformed
                if (++i >= endIndex) {
                    throw new IllegalArgumentException("Invalid escape character in cookie value.");
                }
                b.append(s.charAt(i));
            } else if (c == '"') {
                b.append('\\').append('"');
            } else {
                b.append(c);
            }
        }
        return b.toString();
    }
}
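/*
 * A minimal usage sketch (hypothetical caller, not part of Tomcat) of the public
 * appendCookieValue() helper above. All parameters match its signature; the values
 * are illustrative only.
 */
class ServerCookieUsageSketch {
    public static void main(String[] args) {
        StringBuffer header = new StringBuffer();
        org.apache.tomcat.util.http.ServerCookie.appendCookieValue(
                header,
                0,            // version: start as a v0 (Netscape) cookie
                "JSESSIONID", // name
                "abc123",     // value
                "/app",       // path
                null,         // domain
                null,         // comment (a non-null comment forces a v1 cookie)
                3600,         // maxAge in seconds; v0 cookies get an Expires attribute
                true,         // isSecure
                true);        // isHttpOnly
        // Produces something like: JSESSIONID=abc123; Expires=...; Path=/app; Secure; HttpOnly
        System.out.println(header);
    }
}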
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.security.access; import static org.junit.Assert.*; import java.util.UUID; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.TestTableName; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestScanEarlyTermination extends SecureTestUtil { private static final Log LOG = LogFactory.getLog(TestScanEarlyTermination.class); static { Logger.getLogger(AccessController.class).setLevel(Level.TRACE); Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); } @Rule public TestTableName TEST_TABLE = new TestTableName(); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] TEST_FAMILY1 = Bytes.toBytes("f1"); private static final byte[] TEST_FAMILY2 = Bytes.toBytes("f2"); private static final byte[] TEST_ROW = Bytes.toBytes("testrow"); private static final byte[] TEST_Q1 = Bytes.toBytes("q1"); private static final byte[] TEST_Q2 = Bytes.toBytes("q2"); private static final byte[] ZERO = Bytes.toBytes(0L); private static Configuration conf; private static User USER_OWNER; private static User USER_OTHER; @BeforeClass public static void setupBeforeClass() throws Exception { // setup configuration conf = TEST_UTIL.getConfiguration(); // Enable security enableSecurity(conf); // Verify enableSecurity sets up what we require verifyConfiguration(conf); TEST_UTIL.startMiniCluster(); MasterCoprocessorHost cpHost = TEST_UTIL.getMiniHBaseCluster().getMaster() .getMasterCoprocessorHost(); cpHost.load(AccessController.class, 
Coprocessor.PRIORITY_HIGHEST, conf); AccessController ac = (AccessController) cpHost.findCoprocessor(AccessController.class.getName()); cpHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf); RegionServerCoprocessorHost rsHost = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0) .getRegionServerCoprocessorHost(); rsHost.createEnvironment(AccessController.class, ac, Coprocessor.PRIORITY_HIGHEST, 1, conf); // Wait for the ACL table to become available TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName()); // create a set of test users USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]); USER_OTHER = User.createUserForTesting(conf, "other", new String[0]); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Before public void setUp() throws Exception { HBaseAdmin admin = TEST_UTIL.getHBaseAdmin(); HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName()); htd.setOwner(USER_OWNER); HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY1); hcd.setMaxVersions(10); htd.addFamily(hcd); hcd = new HColumnDescriptor(TEST_FAMILY2); hcd.setMaxVersions(10); htd.addFamily(hcd); // Enable backwards compatible early termination behavior in the HTD. We // want to confirm that the per-table configuration is properly picked up. htd.setConfiguration(AccessControlConstants.CF_ATTRIBUTE_EARLY_OUT, "true"); admin.createTable(htd); TEST_UTIL.waitTableEnabled(TEST_TABLE.getTableName().getName()); } @After public void tearDown() throws Exception { // Clean the _acl_ table try { TEST_UTIL.deleteTable(TEST_TABLE.getTableName()); } catch (TableNotFoundException ex) { // Test deleted the table, no problem LOG.info("Test deleted table " + TEST_TABLE.getTableName()); } assertEquals(0, AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName()).size()); } @Test public void testEarlyScanTermination() throws Exception { // Grant USER_OTHER access to TEST_FAMILY1 only grantOnTable(TEST_UTIL, USER_OTHER.getShortName(), TEST_TABLE.getTableName(), TEST_FAMILY1, null, Action.READ); // Set up test data verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Put put = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); t.put(put); // Set a READ cell ACL for USER_OTHER on this value in FAMILY2 put = new Put(TEST_ROW).add(TEST_FAMILY2, TEST_Q1, ZERO); put.setACL(USER_OTHER.getShortName(), new Permission(Action.READ)); t.put(put); // Set an empty cell ACL for USER_OTHER on this other value in FAMILY2 put = new Put(TEST_ROW).add(TEST_FAMILY2, TEST_Q2, ZERO); put.setACL(USER_OTHER.getShortName(), new Permission()); t.put(put); } finally { t.close(); } return null; } }, USER_OWNER); // A scan of FAMILY1 will be allowed verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan().addFamily(TEST_FAMILY1); Result result = t.getScanner(scan).next(); if (result != null) { assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY1, TEST_Q1)); assertFalse("Improper inclusion", result.containsColumn(TEST_FAMILY2, TEST_Q1)); return result.listCells(); } return null; } finally { t.close(); } } }, USER_OTHER); // A scan of 
FAMILY1 and FAMILY2 will produce results for FAMILY1 without // throwing an exception, however no cells from FAMILY2 will be returned // because we early out checks at the CF level. verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan(); Result result = t.getScanner(scan).next(); if (result != null) { assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY1, TEST_Q1)); assertFalse("Improper inclusion", result.containsColumn(TEST_FAMILY2, TEST_Q1)); return result.listCells(); } return null; } finally { t.close(); } } }, USER_OTHER); // A scan of FAMILY2 will throw an AccessDeniedException verifyDeniedWithException(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan().addFamily(TEST_FAMILY2); Result result = t.getScanner(scan).next(); if (result != null) { return result.listCells(); } return null; } finally { t.close(); } } }, USER_OTHER); // Now grant USER_OTHER access to TEST_FAMILY2:TEST_Q2 grantOnTable(TEST_UTIL, USER_OTHER.getShortName(), TEST_TABLE.getTableName(), TEST_FAMILY2, TEST_Q2, Action.READ); // A scan of FAMILY1 and FAMILY2 will produce combined results. In FAMILY2 // we have access granted to Q2 at the CF level. Because we early out // checks at the CF level the cell ACL on Q1 also granting access is ignored. verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan(); Result result = t.getScanner(scan).next(); if (result != null) { assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY1, TEST_Q1)); assertFalse("Improper inclusion", result.containsColumn(TEST_FAMILY2, TEST_Q1)); assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY2, TEST_Q2)); return result.listCells(); } return null; } finally { t.close(); } } }, USER_OTHER); // A scan of FAMILY1 and FAMILY2 will produce combined results. If we use // a cell first strategy then cell ACLs come into effect. In FAMILY2, that // cell ACL on Q1 now grants access and the empty permission set on Q2 now // denies access. verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); HTable t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan(); scan.setACLStrategy(true); Result result = t.getScanner(scan).next(); if (result != null) { assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY1, TEST_Q1)); assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY2, TEST_Q1)); assertFalse("Improper inclusion", result.containsColumn(TEST_FAMILY2, TEST_Q2)); return result.listCells(); } return null; } finally { t.close(); } } }, USER_OTHER); } }
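/*
 * A minimal sketch (hypothetical table and family names) of the per-table setting this
 * test verifies: the backwards-compatible CF-level early-out behavior is switched on
 * through the table descriptor, exactly as in setUp() above.
 */
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.security.access.AccessControlConstants;

class EarlyOutTableSketch {
    static void createEarlyOutTable(HBaseAdmin admin) throws Exception {
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("example"));
        htd.addFamily(new HColumnDescriptor("f1"));
        // With early-out enabled, access checks stop at the column-family level
        // rather than falling through to per-cell ACL evaluation.
        htd.setConfiguration(AccessControlConstants.CF_ATTRIBUTE_EARLY_OUT, "true");
        admin.createTable(htd);
    }
}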
/* * Autopsy Forensic Browser * * Copyright 2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.ingest; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertNotNull; import junit.framework.Test; import org.netbeans.junit.NbModuleSuite; import org.netbeans.junit.NbTestCase; import org.openide.util.Exceptions; import junit.framework.Assert; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.ImageDSProcessor; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestJobSettings.IngestType; import org.sleuthkit.autopsy.modules.embeddedfileextractor.EmbeddedFileExtractorModuleFactory; import org.sleuthkit.autopsy.modules.hashdatabase.HashLookupModuleFactory; import org.sleuthkit.autopsy.testutils.CaseUtils; import org.sleuthkit.autopsy.testutils.IngestUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.TskCoreException; /** * Functional tests for embedded files. */ public class EmbeddedFileTest extends NbTestCase { private static final String CASE_NAME = "EmbeddedFileTest"; private final Path IMAGE_PATH = Paths.get(this.getDataDir().toString(), "EmbeddedIM_img1_v2.vhd"); public static final String HASH_VALUE = "098f6bcd4621d373cade4e832627b4f6"; private static final int DEEP_FOLDER_COUNT = 25; private Case openCase; public static Test suite() { NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(EmbeddedFileTest.class). clusters(".*"). 
enableModules(".*"); return conf.suite(); } public EmbeddedFileTest(String name) { super(name); } @Override public void setUp() { openCase = CaseUtils.createAsCurrentCase(CASE_NAME); ImageDSProcessor dataSourceProcessor = new ImageDSProcessor(); IngestUtils.addDataSource(dataSourceProcessor, IMAGE_PATH); IngestModuleTemplate embeddedTemplate = IngestUtils.getIngestModuleTemplate(new EmbeddedFileExtractorModuleFactory()); IngestModuleTemplate hashLookupTemplate = IngestUtils.getIngestModuleTemplate(new HashLookupModuleFactory()); ArrayList<IngestModuleTemplate> templates = new ArrayList<>(); templates.add(embeddedTemplate); templates.add(hashLookupTemplate); IngestJobSettings ingestJobSettings = new IngestJobSettings(EmbeddedFileTest.class.getCanonicalName(), IngestType.FILES_ONLY, templates); try { IngestUtils.runIngestJob(openCase.getDataSources(), ingestJobSettings); } catch (TskCoreException ex) { Exceptions.printStackTrace(ex); Assert.fail(ex.getMessage()); } } @Override public void tearDown() { CaseUtils.closeCurrentCase(); } public void testAll() { Logger.getLogger(this.getClass().getName()).log(Level.INFO, "---------------------- Embedded File Tests ---------------------"); runTestEncryptionAndZipBomb(); runTestExtension(); runTestBigFolder(); runTestDeepFolder(); runTestEmbeddedFile(); runTestContent(); } private void runTestEncryptionAndZipBomb() { try { Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting"); List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere("name LIKE '%%'"); final String zipBombSetName = "Possible Zip Bomb"; final String protectedName1 = "password_protected.zip"; final String protectedName2 = "level1_protected.zip"; final String protectedName3 = "42.zip"; final String depthZipBomb = "DepthTriggerZipBomb.zip"; final String ratioZipBomb = "RatioTriggerZipBomb.zip"; int zipBombs = 0; assertEquals("The number of files in the test image has changed", 2221, results.size()); int passwdProtectedZips = 0; for (AbstractFile file : results) { //.zip file has artifact TSK_ENCRYPTION_DETECTED if (file.getName().equalsIgnoreCase(protectedName1) || file.getName().equalsIgnoreCase(protectedName2) || file.getName().equalsIgnoreCase(protectedName3)) { ArrayList<BlackboardArtifact> artifacts = file.getAllArtifacts(); assertEquals("Password protected zip file " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size()); for (BlackboardArtifact artifact : artifacts) { assertEquals("Artifact for password protected zip file " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID()); passwdProtectedZips++; } } else if (file.getName().equalsIgnoreCase(depthZipBomb) || file.getName().equalsIgnoreCase(ratioZipBomb)) { ArrayList<BlackboardArtifact> artifacts = file.getAllArtifacts(); assertEquals("Zip bomb " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size()); for (BlackboardArtifact artifact : artifacts) { assertEquals("Artifact for Zip bomb " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID()); BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME)); assertNotNull("No attribute found for artifact on zip bomb " + file.getName(), attribute); assertEquals("Interesting artifact on file, " + file.getName() + ", does not reflect it being a zip bomb", 
                            zipBombSetName, attribute.getDisplayString());
                    zipBombs++;
                }
            } else { //No other files should have artifacts defined
                assertEquals("Unexpected file, " + file.getName() + ", has artifacts", 0, file.getAllArtifacts().size());
            }
        }
        //Make sure 3 password protected zip files have been tested: password_protected.zip, level1_protected.zip and 42.zip that we download for bomb testing.
        assertEquals("Unexpected number of artifacts reflecting password protected zip files found", 3, passwdProtectedZips);
        //Make sure 2 zip bomb files have been tested: DepthTriggerZipBomb.zip and RatioTriggerZipBomb.zip.
        assertEquals("Unexpected number of artifacts reflecting zip bombs found", 2, zipBombs);
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void runTestBigFolder() {
    final int numOfFilesToTest = 1000;
    try {
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting");
        //Get all files under 'big folder' directory except '.' '..' 'slack' files
        List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere("parent_path LIKE '%big folder/' and name != '.' and name != '..' and extension NOT LIKE '%slack'");
        assertEquals(numOfFilesToTest, results.size()); //There are 1000 files
        int numOfFilesTested = 0;
        for (AbstractFile file : results) {
            String fileName = file.getName();
            //File names should look like file1.txt, file2.txt ... file1000.txt
            String errMsg = String.format("File name %s doesn't follow the expected naming convention: fileNaturalNumber.txt, e.g. file234.txt.", fileName);
            assertTrue(errMsg, file.getName().matches("file[1-9]\\d*\\.txt"));
            String hashValue = file.getMd5Hash();
            //All files have the same hash value
            assertEquals(HASH_VALUE, hashValue);
            numOfFilesTested++;
        }
        //Make sure 1000 files have been tested
        assertEquals(numOfFilesToTest, numOfFilesTested);
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void runTestDeepFolder() {
    try {
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting");
        //Get all files under 'deep folder' directory except '.' '..'
        List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere("parent_path LIKE '%deep folder/' and name != '.' and name != '..'");
        assertEquals(1, results.size());
        StringBuffer dirReached = new StringBuffer();
        ArrayList<String> fileReached = new ArrayList<>();
        checkEachFileInDeepFolder(results.get(0), dirReached, fileReached, 0);
        //Check that all 25 folders/files have been reached
        assertEquals(DEEP_FOLDER_COUNT, fileReached.size());
        //Make sure the test reached the last directory 'dir25'; the whole path is dir1/dir2/.../dir24/dir25/
        assertTrue(dirReached.toString().startsWith("dir1/dir2/"));
        assertTrue(dirReached.toString().endsWith("dir24/dir25/"));
        //Make sure the test reached the last file.txt in dir1/dir2/.../dir24/dir25/
        assertTrue(fileReached.get(0).endsWith(dirReached.toString() + "file.txt"));
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void runTestEmbeddedFile() {
    try {
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting");
        //Query level3.txt under '/ZIP/embedded/level3.zip/'
        List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere("name = 'level3.txt' and parent_path = '/ZIP/embedded/level3.zip/'");
        assertEquals(1, results.size());
        //Query level2.txt under '/ZIP/embedded/level3.zip/level2.zip/'
        results = openCase.getSleuthkitCase().findAllFilesWhere("name = 'level2.txt' and parent_path = '/ZIP/embedded/level3.zip/level2.zip/'");
        assertEquals(1, results.size());
        //Query level1.txt under '/ZIP/embedded/level3.zip/level2.zip/level1.zip/'
        results = openCase.getSleuthkitCase().findAllFilesWhere("name = 'level1.txt' and parent_path = '/ZIP/embedded/level3.zip/level2.zip/level1.zip/'");
        assertEquals(1, results.size());
        //Confirm that we can reach level1.txt from the embedded folder
        results = openCase.getSleuthkitCase().findAllFilesWhere("parent_path LIKE '%embedded/' and name != '.' and name != '..' and extension NOT LIKE '%slack%'");
        assertEquals(1, results.size());
        assertTrue(checkFileInEmbeddedFolder(results.get(0)));
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void runTestContent() {
    final int numOfFilesToTest = 1029;
    try {
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting");
        //All files with the txt extension should have the same hash value, except
        //zipFileWithTxtExtension.txt and the .txt files extracted from password
        //protected zips, which have no hash value
        List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere(
                "extension = 'txt' and name != 'zipFileWithTxtExtension.txt' and parent_path NOT LIKE '%_protected%'");
        assertEquals(numOfFilesToTest, results.size());
        int numOfHashTested = 0;
        for (AbstractFile file : results) {
            String fileName = file.getName();
            String errMsg = String.format("File name %s doesn't have the expected hash value %s.", fileName, HASH_VALUE);
            assertEquals(errMsg, HASH_VALUE, file.getMd5Hash());
            numOfHashTested++;
        }
        //Make sure the hash values of 1029 files have been tested
        assertEquals(numOfFilesToTest, numOfHashTested);
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void runTestExtension() {
    try {
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Starting");
        //Query zipFileWithTxtExtension.txt at extension folder
        List<AbstractFile> results = openCase.getSleuthkitCase().findAllFilesWhere("extension = 'txt' and parent_path = '/ZIP/extension/zipFileWithTxtExtension.txt/'");
        assertEquals(1, results.size());
        assertEquals("file.txt wasn't extracted from the file: zipFileWithTxtExtension.txt", "file.txt", results.get(0).getName());
    } catch (TskCoreException ex) {
        Exceptions.printStackTrace(ex);
        Assert.fail(ex.getMessage());
    }
}

private void checkEachFileInDeepFolder(AbstractFile file, StringBuffer dirReached, ArrayList<String> fileReached, int numOfDir) {
    String errMsg = String.format("File/Directory name is not the expected name: %s", file.getName());
    if (file.isDir() &&
            !file.getName().equals(".") && !file.getName().equals("..")) {
        numOfDir++;
        assertEquals(errMsg, String.format("dir%d", numOfDir), file.getName());
        dirReached.append(file.getName()).append("/");
        try {
            List<AbstractFile> children = file.listFiles();
            for (AbstractFile child : children) {
                checkEachFileInDeepFolder(child, dirReached, fileReached, numOfDir);
            }
        } catch (TskCoreException ex) {
            Exceptions.printStackTrace(ex);
            Assert.fail(ex.getMessage());
        }
    } else if (file.isFile() && !file.getName().endsWith("slack")) {
        assertEquals(errMsg, "file.txt", file.getName());
        fileReached.add(file.getParentPath() + file.getName());
    }
}

private boolean checkFileInEmbeddedFolder(AbstractFile file) {
    if (file.getName().equals("level1.txt")) {
        return true;
    } else if (file.getNameExtension().equalsIgnoreCase("zip")) {
        try {
            List<AbstractFile> children = file.listFiles();
            //recurse into every child rather than returning after the first one
            for (AbstractFile child : children) {
                if (checkFileInEmbeddedFolder(child)) {
                    return true;
                }
            }
        } catch (TskCoreException ex) {
            Exceptions.printStackTrace(ex);
            Assert.fail(ex.getMessage());
        }
    } else {
        assertTrue(file.getNameExtension().equalsIgnoreCase("txt"));
    }
    return false;
}
}
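/*
 * A standalone sketch (hypothetical helper) of the query pattern used throughout
 * EmbeddedFileTest: findAllFilesWhere() takes a SQL WHERE fragment over the case
 * database's files table, so parent_path/name/extension predicates select exactly
 * the extracted files to verify.
 */
import java.util.List;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

class EmbeddedFileQuerySketch {
    /** Files directly under the given directory, skipping '.' and '..' entries. */
    static List<AbstractFile> filesUnder(SleuthkitCase db, String dirName) throws TskCoreException {
        return db.findAllFilesWhere(
                "parent_path LIKE '%" + dirName + "/' and name != '.' and name != '..'");
    }
}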
package org.apache.ddlutils; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.IOException; import java.io.StringWriter; import org.apache.ddlutils.io.DatabaseIO; import org.apache.ddlutils.model.Database; import org.apache.ddlutils.platform.SqlBuilder; /** * Base class for builder tests. * * @version $Revision$ */ public abstract class TestPlatformBase extends TestBase { /** The tested platform. */ private Platform _platform; /** The writer that the builder of the platform writes to. */ private StringWriter _writer; /** * {@inheritDoc} */ protected void setUp() throws Exception { _writer = new StringWriter(); _platform = PlatformFactory.createNewPlatformInstance(getDatabaseName()); _platform.getSqlBuilder().setWriter(_writer); if (_platform.getPlatformInfo().isDelimitedIdentifiersSupported()) { _platform.setDelimitedIdentifierModeOn(true); } } /** * {@inheritDoc} */ protected void tearDown() throws Exception { _platform = null; _writer = null; } /** * Returns the tested platform. * * @return The platform */ protected Platform getPlatform() { return _platform; } /** * Returns the info object of the tested platform. * * @return The platform info object */ protected PlatformInfo getPlatformInfo() { return getPlatform().getPlatformInfo(); } /** * Returns the SQL builder of the tested platform. * * @return The builder object */ protected SqlBuilder getSqlBuilder() { return getPlatform().getSqlBuilder(); } /** * Returns the builder output so far. * * @return The output */ protected String getBuilderOutput() { return _writer.toString(); } /** * Returns the name of the tested database. * * @return The database name */ protected abstract String getDatabaseName(); /** * Returns the database creation SQL for the given database schema. * * @param schema The database schema XML * @return The SQL */ protected String getDatabaseCreationSql(String schema) throws IOException { Database testDb = parseDatabaseFromString(schema); // we're turning the comment creation off to make testing easier getPlatform().setSqlCommentsOn(false); getPlatform().getSqlBuilder().createTables(testDb); return getBuilderOutput(); } /** * Returns the SQL to create the test database for the column tests. 
* * @return The SQL */ protected String getColumnTestDatabaseCreationSql() throws IOException { final String schema = "<?xml version='1.0' encoding='ISO-8859-1'?>\n" + "<database xmlns='" + DatabaseIO.DDLUTILS_NAMESPACE + "' name='datatypetest'>\n" + " <table name='coltype'>\n" + " <column name='COL_ARRAY' type='ARRAY'/>\n" + " <column name='COL_BIGINT' type='BIGINT'/>\n" + " <column name='COL_BINARY' type='BINARY'/>\n" + " <column name='COL_BIT' type='BIT'/>\n" + " <column name='COL_BLOB' type='BLOB'/>\n" + " <column name='COL_BOOLEAN' type='BOOLEAN'/>\n" + " <column name='COL_CHAR' type='CHAR' size='15'/>\n" + " <column name='COL_CLOB' type='CLOB'/>\n" + " <column name='COL_DATALINK' type='DATALINK'/>\n" + " <column name='COL_DATE' type='DATE'/>\n" + " <column name='COL_DECIMAL' type='DECIMAL' size='15,3'/>\n" + " <column name='COL_DECIMAL_NOSCALE' type='DECIMAL' size='15'/>\n" + " <column name='COL_DISTINCT' type='DISTINCT'/>\n" + " <column name='COL_DOUBLE' type='DOUBLE'/>\n" + " <column name='COL_FLOAT' type='FLOAT'/>\n" + " <column name='COL_INTEGER' type='INTEGER'/>\n" + " <column name='COL_JAVA_OBJECT' type='JAVA_OBJECT'/>\n" + " <column name='COL_LONGVARBINARY' type='LONGVARBINARY'/>\n" + " <column name='COL_LONGVARCHAR' type='LONGVARCHAR'/>\n" + " <column name='COL_NULL' type='NULL'/>\n" + " <column name='COL_NUMERIC' type='NUMERIC' size='15' />\n" + " <column name='COL_OTHER' type='OTHER'/>\n" + " <column name='COL_REAL' type='REAL'/>\n" + " <column name='COL_REF' type='REF'/>\n" + " <column name='COL_SMALLINT' type='SMALLINT' size='5'/>\n" + " <column name='COL_STRUCT' type='STRUCT'/>\n" + " <column name='COL_TIME' type='TIME'/>\n" + " <column name='COL_TIMESTAMP' type='TIMESTAMP'/>\n" + " <column name='COL_TINYINT' type='TINYINT'/>\n" + " <column name='COL_VARBINARY' type='VARBINARY' size='15'/>\n" + " <column name='COL_VARCHAR' type='VARCHAR' size='15'/>\n" + " </table>\n" + "</database>"; return getDatabaseCreationSql(schema); } /** * Returns the SQL to create the test database for the constraint tests. * * @return The SQL */ protected String getConstraintTestDatabaseCreationSql() throws IOException { final String schema = "<?xml version='1.0' encoding='ISO-8859-1'?>\n" + "<database xmlns='" + DatabaseIO.DDLUTILS_NAMESPACE + "' name='columnconstraintstest'>\n" + " <table name='constraints'>\n" + " <column name='COL_PK' type='VARCHAR' size='32' primaryKey='true'/>\n" + " <column name='COL_PK_AUTO_INCR' type='INTEGER' primaryKey='true' autoIncrement='true'/>\n" + " <column name='COL_NOT_NULL' type='BINARY' size='100' required='true'/>\n" + " <column name='COL_NOT_NULL_DEFAULT' type='DOUBLE' required='true' default='-2.0'/>\n" + " <column name='COL_DEFAULT' type='CHAR' size='4' default='test'/>\n" + " <column name='COL_AUTO_INCR' type='BIGINT' autoIncrement='true'/>\n" + " </table>\n" + "</database>"; return getDatabaseCreationSql(schema); } /** * Returns the SQL to create the test database for the table-level constraint tests. 
* * @return The SQL */ protected String getTableConstraintTestDatabaseCreationSql() throws IOException { final String schema = "<?xml version='1.0' encoding='ISO-8859-1'?>\n" + "<database xmlns='" + DatabaseIO.DDLUTILS_NAMESPACE + "' name='tableconstraintstest'>\n" + " <table name='table1'>\n" + " <column name='COL_PK_1' type='VARCHAR' size='32' primaryKey='true' required='true'/>\n" + " <column name='COL_PK_2' type='INTEGER' primaryKey='true'/>\n" + " <column name='COL_INDEX_1' type='BINARY' size='100' required='true'/>\n" + " <column name='COL_INDEX_2' type='DOUBLE' required='true'/>\n" + " <column name='COL_INDEX_3' type='CHAR' size='4'/>\n" + " <index name='testindex1'>\n" + " <index-column name='COL_INDEX_2'/>\n" + " </index>\n" + " <unique name='testindex2'>\n" + " <unique-column name='COL_INDEX_3'/>\n" + " <unique-column name='COL_INDEX_1'/>\n" + " </unique>\n" + " </table>\n" + " <table name='table2'>\n" + " <column name='COL_PK' type='INTEGER' primaryKey='true'/>\n" + " <column name='COL_FK_1' type='INTEGER'/>\n" + " <column name='COL_FK_2' type='VARCHAR' size='32' required='true'/>\n" + " <foreign-key foreignTable='table1'>\n" + " <reference local='COL_FK_1' foreign='COL_PK_2'/>\n" + " <reference local='COL_FK_2' foreign='COL_PK_1'/>\n" + " </foreign-key>\n" + " </table>\n" + " <table name='table3'>\n" + " <column name='COL_PK' type='VARCHAR' size='16' primaryKey='true'/>\n" + " <column name='COL_FK' type='INTEGER' required='true'/>\n" + " <foreign-key name='testfk' foreignTable='table2'>\n" + " <reference local='COL_FK' foreign='COL_PK'/>\n" + " </foreign-key>\n" + " </table>\n" + "</database>"; return getDatabaseCreationSql(schema); } /** * Returns the SQL to create the test database for testing character escaping. * * @return The SQL */ protected String getCharEscapingTestDatabaseCreationSql() throws IOException { final String schema = "<?xml version='1.0' encoding='ISO-8859-1'?>\n" + "<database xmlns='" + DatabaseIO.DDLUTILS_NAMESPACE + "' name='escapetest'>\n" + " <table name='escapedcharacters'>\n" + " <column name='COL_PK' type='INTEGER' primaryKey='true'/>\n" + " <column name='COL_TEXT' type='VARCHAR' size='128' default='&#39;'/>\n" + " </table>\n" + "</database>"; return getDatabaseCreationSql(schema); } }
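/*
 * A minimal concrete subclass sketch (the "MySQL" platform key and the CREATE TABLE
 * assertion are assumptions, not taken from the sources above) showing how
 * TestPlatformBase is meant to be extended: name the platform, then assert on the SQL
 * produced by the schema helpers.
 */
package org.apache.ddlutils;

import java.io.IOException;

public class TestMySqlPlatformSketch extends TestPlatformBase {
    protected String getDatabaseName() {
        return "MySQL"; // assumed to be a valid PlatformFactory platform name
    }

    public void testColumnTypes() throws IOException {
        String sql = getColumnTestDatabaseCreationSql();
        // The exact DDL is platform-specific; this only checks that DDL was emitted.
        assertTrue(sql.contains("CREATE TABLE"));
    }
}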
/* Copyright (c) 2011 Stanislav Vitvitskiy Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.googlecode.mp4parser.h264.model; import com.googlecode.mp4parser.h264.read.CAVLCReader; import com.googlecode.mp4parser.h264.write.CAVLCWriter; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * Sequence Parameter Set structure of h264 bitstream * <p> * capable to serialize and deserialize with CAVLC bitstream</p> * * @author Stanislav Vitvitskiy */ public class SeqParameterSet extends BitstreamElement { public int pic_order_cnt_type; public boolean field_pic_flag; public boolean delta_pic_order_always_zero_flag; public boolean weighted_pred_flag; public int weighted_bipred_idc; public boolean entropy_coding_mode_flag; public boolean mb_adaptive_frame_field_flag; public boolean direct_8x8_inference_flag; public ChromaFormat chroma_format_idc; public int log2_max_frame_num_minus4; public int log2_max_pic_order_cnt_lsb_minus4; public int pic_height_in_map_units_minus1; public int pic_width_in_mbs_minus1; public int bit_depth_luma_minus8; public int bit_depth_chroma_minus8; public boolean qpprime_y_zero_transform_bypass_flag; public int profile_idc; public long reserved_zero_2bits; public boolean constraint_set_0_flag; public boolean constraint_set_1_flag; public boolean constraint_set_2_flag; public boolean constraint_set_3_flag; public boolean constraint_set_4_flag; public boolean constraint_set_5_flag; public int level_idc; public int seq_parameter_set_id; public boolean residual_color_transform_flag; public int offset_for_non_ref_pic; public int offset_for_top_to_bottom_field; public int num_ref_frames; public boolean gaps_in_frame_num_value_allowed_flag; public boolean frame_mbs_only_flag; public boolean frame_cropping_flag; public int frame_crop_left_offset; public int frame_crop_right_offset; public int frame_crop_top_offset; public int frame_crop_bottom_offset; public int[] offsetForRefFrame; public VUIParameters vuiParams; public ScalingMatrix scalingMatrix; public int num_ref_frames_in_pic_order_cnt_cycle; public static SeqParameterSet read(byte[] b) throws IOException { return read(new ByteArrayInputStream(b)); } public static SeqParameterSet read(InputStream is) throws IOException { CAVLCReader reader = new CAVLCReader(is); SeqParameterSet sps = new SeqParameterSet(); sps.profile_idc = (int) reader.readNBit(8, "SPS: profile_idc"); sps.constraint_set_0_flag = reader .readBool("SPS: constraint_set_0_flag"); sps.constraint_set_1_flag = reader 
.readBool("SPS: constraint_set_1_flag"); sps.constraint_set_2_flag = reader .readBool("SPS: constraint_set_2_flag"); sps.constraint_set_3_flag = reader .readBool("SPS: constraint_set_3_flag"); sps.constraint_set_4_flag = reader .readBool("SPS: constraint_set_4_flag"); sps.constraint_set_5_flag = reader .readBool("SPS: constraint_set_5_flag"); sps.reserved_zero_2bits = reader.readNBit(2, "SPS: reserved_zero_2bits"); sps.level_idc = (int) reader.readNBit(8, "SPS: level_idc"); sps.seq_parameter_set_id = reader.readUE("SPS: seq_parameter_set_id"); if (sps.profile_idc == 100 || sps.profile_idc == 110 || sps.profile_idc == 122 || sps.profile_idc == 144) { sps.chroma_format_idc = ChromaFormat.fromId(reader .readUE("SPS: chroma_format_idc")); if (sps.chroma_format_idc == ChromaFormat.YUV_444) { sps.residual_color_transform_flag = reader .readBool("SPS: residual_color_transform_flag"); } sps.bit_depth_luma_minus8 = reader .readUE("SPS: bit_depth_luma_minus8"); sps.bit_depth_chroma_minus8 = reader .readUE("SPS: bit_depth_chroma_minus8"); sps.qpprime_y_zero_transform_bypass_flag = reader .readBool("SPS: qpprime_y_zero_transform_bypass_flag"); boolean seqScalingMatrixPresent = reader .readBool("SPS: seq_scaling_matrix_present_lag"); if (seqScalingMatrixPresent) { readScalingListMatrix(reader, sps); } } else { sps.chroma_format_idc = ChromaFormat.YUV_420; } sps.log2_max_frame_num_minus4 = reader .readUE("SPS: log2_max_frame_num_minus4"); sps.pic_order_cnt_type = reader.readUE("SPS: pic_order_cnt_type"); if (sps.pic_order_cnt_type == 0) { sps.log2_max_pic_order_cnt_lsb_minus4 = reader .readUE("SPS: log2_max_pic_order_cnt_lsb_minus4"); } else if (sps.pic_order_cnt_type == 1) { sps.delta_pic_order_always_zero_flag = reader .readBool("SPS: delta_pic_order_always_zero_flag"); sps.offset_for_non_ref_pic = reader .readSE("SPS: offset_for_non_ref_pic"); sps.offset_for_top_to_bottom_field = reader .readSE("SPS: offset_for_top_to_bottom_field"); sps.num_ref_frames_in_pic_order_cnt_cycle = reader .readUE("SPS: num_ref_frames_in_pic_order_cnt_cycle"); sps.offsetForRefFrame = new int[sps.num_ref_frames_in_pic_order_cnt_cycle]; for (int i = 0; i < sps.num_ref_frames_in_pic_order_cnt_cycle; i++) { sps.offsetForRefFrame[i] = reader .readSE("SPS: offsetForRefFrame [" + i + "]"); } } sps.num_ref_frames = reader.readUE("SPS: num_ref_frames"); sps.gaps_in_frame_num_value_allowed_flag = reader .readBool("SPS: gaps_in_frame_num_value_allowed_flag"); sps.pic_width_in_mbs_minus1 = reader .readUE("SPS: pic_width_in_mbs_minus1"); sps.pic_height_in_map_units_minus1 = reader .readUE("SPS: pic_height_in_map_units_minus1"); sps.frame_mbs_only_flag = reader.readBool("SPS: frame_mbs_only_flag"); if (!sps.frame_mbs_only_flag) { sps.mb_adaptive_frame_field_flag = reader .readBool("SPS: mb_adaptive_frame_field_flag"); } sps.direct_8x8_inference_flag = reader .readBool("SPS: direct_8x8_inference_flag"); sps.frame_cropping_flag = reader.readBool("SPS: frame_cropping_flag"); if (sps.frame_cropping_flag) { sps.frame_crop_left_offset = reader .readUE("SPS: frame_crop_left_offset"); sps.frame_crop_right_offset = reader .readUE("SPS: frame_crop_right_offset"); sps.frame_crop_top_offset = reader .readUE("SPS: frame_crop_top_offset"); sps.frame_crop_bottom_offset = reader .readUE("SPS: frame_crop_bottom_offset"); } boolean vui_parameters_present_flag = reader .readBool("SPS: vui_parameters_present_flag"); if (vui_parameters_present_flag) sps.vuiParams = ReadVUIParameters(reader); reader.readTrailingBits(); return sps; } private static void 
    private static void readScalingListMatrix(CAVLCReader reader,
            SeqParameterSet sps) throws IOException {
        sps.scalingMatrix = new ScalingMatrix();
        // Allocate the list arrays once, up front; allocating them inside the
        // loop would discard lists parsed in earlier iterations.
        sps.scalingMatrix.ScalingList4x4 = new ScalingList[8];
        sps.scalingMatrix.ScalingList8x8 = new ScalingList[8];
        for (int i = 0; i < 8; i++) {
            boolean seqScalingListPresentFlag = reader
                    .readBool("SPS: seqScalingListPresentFlag");
            if (seqScalingListPresentFlag) {
                if (i < 6) {
                    sps.scalingMatrix.ScalingList4x4[i] = ScalingList.read(reader, 16);
                } else {
                    sps.scalingMatrix.ScalingList8x8[i - 6] = ScalingList.read(reader, 64);
                }
            }
        }
    }

    private static VUIParameters readVUIParameters(CAVLCReader reader)
            throws IOException {
        VUIParameters vuip = new VUIParameters();
        vuip.aspect_ratio_info_present_flag = reader
                .readBool("VUI: aspect_ratio_info_present_flag");
        if (vuip.aspect_ratio_info_present_flag) {
            vuip.aspect_ratio = AspectRatio.fromValue((int) reader.readNBit(8,
                    "VUI: aspect_ratio"));
            if (vuip.aspect_ratio == AspectRatio.Extended_SAR) {
                vuip.sar_width = (int) reader.readNBit(16, "VUI: sar_width");
                vuip.sar_height = (int) reader.readNBit(16, "VUI: sar_height");
            }
        }
        vuip.overscan_info_present_flag = reader
                .readBool("VUI: overscan_info_present_flag");
        if (vuip.overscan_info_present_flag) {
            vuip.overscan_appropriate_flag = reader
                    .readBool("VUI: overscan_appropriate_flag");
        }
        vuip.video_signal_type_present_flag = reader
                .readBool("VUI: video_signal_type_present_flag");
        if (vuip.video_signal_type_present_flag) {
            vuip.video_format = (int) reader.readNBit(3, "VUI: video_format");
            vuip.video_full_range_flag = reader.readBool("VUI: video_full_range_flag");
            vuip.colour_description_present_flag = reader
                    .readBool("VUI: colour_description_present_flag");
            if (vuip.colour_description_present_flag) {
                vuip.colour_primaries = (int) reader.readNBit(8,
                        "VUI: colour_primaries");
                vuip.transfer_characteristics = (int) reader.readNBit(8,
                        "VUI: transfer_characteristics");
                vuip.matrix_coefficients = (int) reader.readNBit(8,
                        "VUI: matrix_coefficients");
            }
        }
        vuip.chroma_loc_info_present_flag = reader
                .readBool("VUI: chroma_loc_info_present_flag");
        if (vuip.chroma_loc_info_present_flag) {
            vuip.chroma_sample_loc_type_top_field = reader
                    .readUE("VUI: chroma_sample_loc_type_top_field");
            vuip.chroma_sample_loc_type_bottom_field = reader
                    .readUE("VUI: chroma_sample_loc_type_bottom_field");
        }
        vuip.timing_info_present_flag = reader
                .readBool("VUI: timing_info_present_flag");
        if (vuip.timing_info_present_flag) {
            vuip.num_units_in_tick = (int) reader.readNBit(32,
                    "VUI: num_units_in_tick");
            vuip.time_scale = (int) reader.readNBit(32, "VUI: time_scale");
            vuip.fixed_frame_rate_flag = reader.readBool("VUI: fixed_frame_rate_flag");
        }
        boolean nal_hrd_parameters_present_flag = reader
                .readBool("VUI: nal_hrd_parameters_present_flag");
        if (nal_hrd_parameters_present_flag) {
            vuip.nalHRDParams = readHRDParameters(reader);
        }
        boolean vcl_hrd_parameters_present_flag = reader
                .readBool("VUI: vcl_hrd_parameters_present_flag");
        if (vcl_hrd_parameters_present_flag) {
            vuip.vclHRDParams = readHRDParameters(reader);
        }
        if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {
            vuip.low_delay_hrd_flag = reader.readBool("VUI: low_delay_hrd_flag");
        }
        vuip.pic_struct_present_flag = reader.readBool("VUI: pic_struct_present_flag");
        boolean bitstream_restriction_flag = reader
                .readBool("VUI: bitstream_restriction_flag");
        if (bitstream_restriction_flag) {
            vuip.bitstreamRestriction = new VUIParameters.BitstreamRestriction();
            vuip.bitstreamRestriction.motion_vectors_over_pic_boundaries_flag = reader
                    .readBool("VUI: motion_vectors_over_pic_boundaries_flag");
            vuip.bitstreamRestriction.max_bytes_per_pic_denom = reader
                    .readUE("VUI: max_bytes_per_pic_denom");
            vuip.bitstreamRestriction.max_bits_per_mb_denom = reader
                    .readUE("VUI: max_bits_per_mb_denom");
            vuip.bitstreamRestriction.log2_max_mv_length_horizontal = reader
                    .readUE("VUI: log2_max_mv_length_horizontal");
            vuip.bitstreamRestriction.log2_max_mv_length_vertical = reader
                    .readUE("VUI: log2_max_mv_length_vertical");
            vuip.bitstreamRestriction.num_reorder_frames = reader
                    .readUE("VUI: num_reorder_frames");
            vuip.bitstreamRestriction.max_dec_frame_buffering = reader
                    .readUE("VUI: max_dec_frame_buffering");
        }
        return vuip;
    }
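    // When timing_info_present_flag is set, num_units_in_tick and time_scale
    // define the stream clock: time_scale / num_units_in_tick ticks per
    // second, where one field takes two ticks. A hypothetical helper (an
    // assumption, not part of the original class) for frame-coded,
    // fixed-frame-rate streams:
    //
    //     static double frameRate(VUIParameters vuip) {
    //         // two ticks per frame for progressive (frame-coded) content
    //         return vuip.time_scale / (2.0 * vuip.num_units_in_tick);
    //     }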
motion_vectors_over_pic_boundaries_flag"); vuip.bitstreamRestriction.max_bytes_per_pic_denom = reader .readUE("VUI max_bytes_per_pic_denom"); vuip.bitstreamRestriction.max_bits_per_mb_denom = reader .readUE("VUI max_bits_per_mb_denom"); vuip.bitstreamRestriction.log2_max_mv_length_horizontal = reader .readUE("VUI log2_max_mv_length_horizontal"); vuip.bitstreamRestriction.log2_max_mv_length_vertical = reader .readUE("VUI log2_max_mv_length_vertical"); vuip.bitstreamRestriction.num_reorder_frames = reader .readUE("VUI num_reorder_frames"); vuip.bitstreamRestriction.max_dec_frame_buffering = reader .readUE("VUI max_dec_frame_buffering"); } return vuip; } private static HRDParameters readHRDParameters(CAVLCReader reader) throws IOException { HRDParameters hrd = new HRDParameters(); hrd.cpb_cnt_minus1 = reader.readUE("SPS: cpb_cnt_minus1"); hrd.bit_rate_scale = (int) reader.readNBit(4, "HRD: bit_rate_scale"); hrd.cpb_size_scale = (int) reader.readNBit(4, "HRD: cpb_size_scale"); hrd.bit_rate_value_minus1 = new int[hrd.cpb_cnt_minus1 + 1]; hrd.cpb_size_value_minus1 = new int[hrd.cpb_cnt_minus1 + 1]; hrd.cbr_flag = new boolean[hrd.cpb_cnt_minus1 + 1]; for (int SchedSelIdx = 0; SchedSelIdx <= hrd.cpb_cnt_minus1; SchedSelIdx++) { hrd.bit_rate_value_minus1[SchedSelIdx] = reader .readUE("HRD: bit_rate_value_minus1"); hrd.cpb_size_value_minus1[SchedSelIdx] = reader .readUE("HRD: cpb_size_value_minus1"); hrd.cbr_flag[SchedSelIdx] = reader.readBool("HRD: cbr_flag"); } hrd.initial_cpb_removal_delay_length_minus1 = (int) reader.readNBit(5, "HRD: initial_cpb_removal_delay_length_minus1"); hrd.cpb_removal_delay_length_minus1 = (int) reader.readNBit(5, "HRD: cpb_removal_delay_length_minus1"); hrd.dpb_output_delay_length_minus1 = (int) reader.readNBit(5, "HRD: dpb_output_delay_length_minus1"); hrd.time_offset_length = (int) reader.readNBit(5, "HRD: time_offset_length"); return hrd; } public void write(OutputStream out) throws IOException { CAVLCWriter writer = new CAVLCWriter(out); writer.writeNBit(profile_idc, 8, "SPS: profile_idc"); writer.writeBool(constraint_set_0_flag, "SPS: constraint_set_0_flag"); writer.writeBool(constraint_set_1_flag, "SPS: constraint_set_1_flag"); writer.writeBool(constraint_set_2_flag, "SPS: constraint_set_2_flag"); writer.writeBool(constraint_set_3_flag, "SPS: constraint_set_3_flag"); writer.writeNBit(0, 4, "SPS: reserved"); writer.writeNBit(level_idc, 8, "SPS: level_idc"); writer.writeUE(seq_parameter_set_id, "SPS: seq_parameter_set_id"); if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 144) { writer.writeUE(chroma_format_idc.getId(), "SPS: chroma_format_idc"); if (chroma_format_idc == ChromaFormat.YUV_444) { writer.writeBool(residual_color_transform_flag, "SPS: residual_color_transform_flag"); } writer.writeUE(bit_depth_luma_minus8, "SPS: "); writer.writeUE(bit_depth_chroma_minus8, "SPS: "); writer.writeBool(qpprime_y_zero_transform_bypass_flag, "SPS: qpprime_y_zero_transform_bypass_flag"); writer.writeBool(scalingMatrix != null, "SPS: "); if (scalingMatrix != null) { for (int i = 0; i < 8; i++) { if (i < 6) { writer.writeBool( scalingMatrix.ScalingList4x4[i] != null, "SPS: "); if (scalingMatrix.ScalingList4x4[i] != null) { scalingMatrix.ScalingList4x4[i].write(writer); } } else { writer.writeBool( scalingMatrix.ScalingList8x8[i - 6] != null, "SPS: "); if (scalingMatrix.ScalingList8x8[i - 6] != null) { scalingMatrix.ScalingList8x8[i - 6].write(writer); } } } } } writer.writeUE(log2_max_frame_num_minus4, "SPS: log2_max_frame_num_minus4"); 
    public void write(OutputStream out) throws IOException {
        CAVLCWriter writer = new CAVLCWriter(out);

        writer.writeNBit(profile_idc, 8, "SPS: profile_idc");
        writer.writeBool(constraint_set_0_flag, "SPS: constraint_set_0_flag");
        writer.writeBool(constraint_set_1_flag, "SPS: constraint_set_1_flag");
        writer.writeBool(constraint_set_2_flag, "SPS: constraint_set_2_flag");
        writer.writeBool(constraint_set_3_flag, "SPS: constraint_set_3_flag");
        // Mirror read(): six constraint flags followed by two reserved zero
        // bits, so that a parsed SPS round-trips bit-exactly.
        writer.writeBool(constraint_set_4_flag, "SPS: constraint_set_4_flag");
        writer.writeBool(constraint_set_5_flag, "SPS: constraint_set_5_flag");
        writer.writeNBit(0, 2, "SPS: reserved_zero_2bits");
        writer.writeNBit(level_idc, 8, "SPS: level_idc");
        writer.writeUE(seq_parameter_set_id, "SPS: seq_parameter_set_id");

        if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122
                || profile_idc == 144) {
            writer.writeUE(chroma_format_idc.getId(), "SPS: chroma_format_idc");
            if (chroma_format_idc == ChromaFormat.YUV_444) {
                writer.writeBool(residual_color_transform_flag,
                        "SPS: residual_color_transform_flag");
            }
            writer.writeUE(bit_depth_luma_minus8, "SPS: bit_depth_luma_minus8");
            writer.writeUE(bit_depth_chroma_minus8, "SPS: bit_depth_chroma_minus8");
            writer.writeBool(qpprime_y_zero_transform_bypass_flag,
                    "SPS: qpprime_y_zero_transform_bypass_flag");
            writer.writeBool(scalingMatrix != null,
                    "SPS: seq_scaling_matrix_present_flag");
            if (scalingMatrix != null) {
                for (int i = 0; i < 8; i++) {
                    if (i < 6) {
                        writer.writeBool(scalingMatrix.ScalingList4x4[i] != null,
                                "SPS: seq_scaling_list_present_flag");
                        if (scalingMatrix.ScalingList4x4[i] != null) {
                            scalingMatrix.ScalingList4x4[i].write(writer);
                        }
                    } else {
                        writer.writeBool(scalingMatrix.ScalingList8x8[i - 6] != null,
                                "SPS: seq_scaling_list_present_flag");
                        if (scalingMatrix.ScalingList8x8[i - 6] != null) {
                            scalingMatrix.ScalingList8x8[i - 6].write(writer);
                        }
                    }
                }
            }
        }
        writer.writeUE(log2_max_frame_num_minus4, "SPS: log2_max_frame_num_minus4");
        writer.writeUE(pic_order_cnt_type, "SPS: pic_order_cnt_type");
        if (pic_order_cnt_type == 0) {
            writer.writeUE(log2_max_pic_order_cnt_lsb_minus4,
                    "SPS: log2_max_pic_order_cnt_lsb_minus4");
        } else if (pic_order_cnt_type == 1) {
            writer.writeBool(delta_pic_order_always_zero_flag,
                    "SPS: delta_pic_order_always_zero_flag");
            writer.writeSE(offset_for_non_ref_pic, "SPS: offset_for_non_ref_pic");
            writer.writeSE(offset_for_top_to_bottom_field,
                    "SPS: offset_for_top_to_bottom_field");
            writer.writeUE(offsetForRefFrame.length,
                    "SPS: num_ref_frames_in_pic_order_cnt_cycle");
            for (int i = 0; i < offsetForRefFrame.length; i++) {
                writer.writeSE(offsetForRefFrame[i], "SPS: offsetForRefFrame");
            }
        }
        writer.writeUE(num_ref_frames, "SPS: num_ref_frames");
        writer.writeBool(gaps_in_frame_num_value_allowed_flag,
                "SPS: gaps_in_frame_num_value_allowed_flag");
        writer.writeUE(pic_width_in_mbs_minus1, "SPS: pic_width_in_mbs_minus1");
        writer.writeUE(pic_height_in_map_units_minus1,
                "SPS: pic_height_in_map_units_minus1");
        writer.writeBool(frame_mbs_only_flag, "SPS: frame_mbs_only_flag");
        if (!frame_mbs_only_flag) {
            writer.writeBool(mb_adaptive_frame_field_flag,
                    "SPS: mb_adaptive_frame_field_flag");
        }
        writer.writeBool(direct_8x8_inference_flag, "SPS: direct_8x8_inference_flag");
        writer.writeBool(frame_cropping_flag, "SPS: frame_cropping_flag");
        if (frame_cropping_flag) {
            writer.writeUE(frame_crop_left_offset, "SPS: frame_crop_left_offset");
            writer.writeUE(frame_crop_right_offset, "SPS: frame_crop_right_offset");
            writer.writeUE(frame_crop_top_offset, "SPS: frame_crop_top_offset");
            writer.writeUE(frame_crop_bottom_offset, "SPS: frame_crop_bottom_offset");
        }
        writer.writeBool(vuiParams != null, "SPS: vui_parameters_present_flag");
        if (vuiParams != null) {
            writeVUIParameters(vuiParams, writer);
        }
        writer.writeTrailingBits();
    }
"VUI: num_units_in_tick"); writer.writeNBit(vuip.time_scale, 32, "VUI: time_scale"); writer.writeBool(vuip.fixed_frame_rate_flag, "VUI: fixed_frame_rate_flag"); } writer.writeBool(vuip.nalHRDParams != null, "VUI: "); if (vuip.nalHRDParams != null) { writeHRDParameters(vuip.nalHRDParams, writer); } writer.writeBool(vuip.vclHRDParams != null, "VUI: "); if (vuip.vclHRDParams != null) { writeHRDParameters(vuip.vclHRDParams, writer); } if (vuip.nalHRDParams != null || vuip.vclHRDParams != null) { writer .writeBool(vuip.low_delay_hrd_flag, "VUI: low_delay_hrd_flag"); } writer.writeBool(vuip.pic_struct_present_flag, "VUI: pic_struct_present_flag"); writer.writeBool(vuip.bitstreamRestriction != null, "VUI: "); if (vuip.bitstreamRestriction != null) { writer .writeBool( vuip.bitstreamRestriction.motion_vectors_over_pic_boundaries_flag, "VUI: motion_vectors_over_pic_boundaries_flag"); writer.writeUE(vuip.bitstreamRestriction.max_bytes_per_pic_denom, "VUI: max_bytes_per_pic_denom"); writer.writeUE(vuip.bitstreamRestriction.max_bits_per_mb_denom, "VUI: max_bits_per_mb_denom"); writer.writeUE( vuip.bitstreamRestriction.log2_max_mv_length_horizontal, "VUI: log2_max_mv_length_horizontal"); writer.writeUE( vuip.bitstreamRestriction.log2_max_mv_length_vertical, "VUI: log2_max_mv_length_vertical"); writer.writeUE(vuip.bitstreamRestriction.num_reorder_frames, "VUI: num_reorder_frames"); writer.writeUE(vuip.bitstreamRestriction.max_dec_frame_buffering, "VUI: max_dec_frame_buffering"); } } private void writeHRDParameters(HRDParameters hrd, CAVLCWriter writer) throws IOException { writer.writeUE(hrd.cpb_cnt_minus1, "HRD: cpb_cnt_minus1"); writer.writeNBit(hrd.bit_rate_scale, 4, "HRD: bit_rate_scale"); writer.writeNBit(hrd.cpb_size_scale, 4, "HRD: cpb_size_scale"); for (int SchedSelIdx = 0; SchedSelIdx <= hrd.cpb_cnt_minus1; SchedSelIdx++) { writer.writeUE(hrd.bit_rate_value_minus1[SchedSelIdx], "HRD: "); writer.writeUE(hrd.cpb_size_value_minus1[SchedSelIdx], "HRD: "); writer.writeBool(hrd.cbr_flag[SchedSelIdx], "HRD: "); } writer.writeNBit(hrd.initial_cpb_removal_delay_length_minus1, 5, "HRD: initial_cpb_removal_delay_length_minus1"); writer.writeNBit(hrd.cpb_removal_delay_length_minus1, 5, "HRD: cpb_removal_delay_length_minus1"); writer.writeNBit(hrd.dpb_output_delay_length_minus1, 5, "HRD: dpb_output_delay_length_minus1"); writer.writeNBit(hrd.time_offset_length, 5, "HRD: time_offset_length"); } @Override public String toString() { return "SeqParameterSet{ " + "\n pic_order_cnt_type=" + pic_order_cnt_type + ", \n field_pic_flag=" + field_pic_flag + ", \n delta_pic_order_always_zero_flag=" + delta_pic_order_always_zero_flag + ", \n weighted_pred_flag=" + weighted_pred_flag + ", \n weighted_bipred_idc=" + weighted_bipred_idc + ", \n entropy_coding_mode_flag=" + entropy_coding_mode_flag + ", \n mb_adaptive_frame_field_flag=" + mb_adaptive_frame_field_flag + ", \n direct_8x8_inference_flag=" + direct_8x8_inference_flag + ", \n chroma_format_idc=" + chroma_format_idc + ", \n log2_max_frame_num_minus4=" + log2_max_frame_num_minus4 + ", \n log2_max_pic_order_cnt_lsb_minus4=" + log2_max_pic_order_cnt_lsb_minus4 + ", \n pic_height_in_map_units_minus1=" + pic_height_in_map_units_minus1 + ", \n pic_width_in_mbs_minus1=" + pic_width_in_mbs_minus1 + ", \n bit_depth_luma_minus8=" + bit_depth_luma_minus8 + ", \n bit_depth_chroma_minus8=" + bit_depth_chroma_minus8 + ", \n qpprime_y_zero_transform_bypass_flag=" + qpprime_y_zero_transform_bypass_flag + ", \n profile_idc=" + profile_idc + ", \n constraint_set_0_flag=" + 
    @Override
    public String toString() {
        return "SeqParameterSet{ "
                + "\n pic_order_cnt_type=" + pic_order_cnt_type
                + ", \n field_pic_flag=" + field_pic_flag
                + ", \n delta_pic_order_always_zero_flag=" + delta_pic_order_always_zero_flag
                + ", \n weighted_pred_flag=" + weighted_pred_flag
                + ", \n weighted_bipred_idc=" + weighted_bipred_idc
                + ", \n entropy_coding_mode_flag=" + entropy_coding_mode_flag
                + ", \n mb_adaptive_frame_field_flag=" + mb_adaptive_frame_field_flag
                + ", \n direct_8x8_inference_flag=" + direct_8x8_inference_flag
                + ", \n chroma_format_idc=" + chroma_format_idc
                + ", \n log2_max_frame_num_minus4=" + log2_max_frame_num_minus4
                + ", \n log2_max_pic_order_cnt_lsb_minus4=" + log2_max_pic_order_cnt_lsb_minus4
                + ", \n pic_height_in_map_units_minus1=" + pic_height_in_map_units_minus1
                + ", \n pic_width_in_mbs_minus1=" + pic_width_in_mbs_minus1
                + ", \n bit_depth_luma_minus8=" + bit_depth_luma_minus8
                + ", \n bit_depth_chroma_minus8=" + bit_depth_chroma_minus8
                + ", \n qpprime_y_zero_transform_bypass_flag=" + qpprime_y_zero_transform_bypass_flag
                + ", \n profile_idc=" + profile_idc
                + ", \n constraint_set_0_flag=" + constraint_set_0_flag
                + ", \n constraint_set_1_flag=" + constraint_set_1_flag
                + ", \n constraint_set_2_flag=" + constraint_set_2_flag
                + ", \n constraint_set_3_flag=" + constraint_set_3_flag
                + ", \n constraint_set_4_flag=" + constraint_set_4_flag
                + ", \n constraint_set_5_flag=" + constraint_set_5_flag
                + ", \n level_idc=" + level_idc
                + ", \n seq_parameter_set_id=" + seq_parameter_set_id
                + ", \n residual_color_transform_flag=" + residual_color_transform_flag
                + ", \n offset_for_non_ref_pic=" + offset_for_non_ref_pic
                + ", \n offset_for_top_to_bottom_field=" + offset_for_top_to_bottom_field
                + ", \n num_ref_frames=" + num_ref_frames
                + ", \n gaps_in_frame_num_value_allowed_flag=" + gaps_in_frame_num_value_allowed_flag
                + ", \n frame_mbs_only_flag=" + frame_mbs_only_flag
                + ", \n frame_cropping_flag=" + frame_cropping_flag
                + ", \n frame_crop_left_offset=" + frame_crop_left_offset
                + ", \n frame_crop_right_offset=" + frame_crop_right_offset
                + ", \n frame_crop_top_offset=" + frame_crop_top_offset
                + ", \n frame_crop_bottom_offset=" + frame_crop_bottom_offset
                // Arrays.toString prints the element values rather than the
                // bare array reference.
                + ", \n offsetForRefFrame=" + Arrays.toString(offsetForRefFrame)
                + ", \n vuiParams=" + vuiParams
                + ", \n scalingMatrix=" + scalingMatrix
                + ", \n num_ref_frames_in_pic_order_cnt_cycle=" + num_ref_frames_in_pic_order_cnt_cycle
                + '}';
    }
}