focal_method | test_case |
---|---|
@Deprecated
@Override
public V remove(final Object key) {
throw new UnsupportedOperationException("Removing from registeredStores is not allowed");
} | @SuppressWarnings("deprecation")
@Test
public void shouldForbidConditionalRemove() {
final FixedOrderMap<String, Integer> map = new FixedOrderMap<>();
map.put("a", 0);
assertThrows(UnsupportedOperationException.class, () -> map.remove("a", 0));
assertEquals(0, map.get("a"));
} |
public CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> commitOffset(
RequestContext context,
OffsetCommitRequestData request
) throws ApiException {
Group group = validateOffsetCommit(context, request);
// In the old consumer group protocol, the offset commits maintain the session if
// the group is in Stable or PreparingRebalance state.
if (group.type() == Group.GroupType.CLASSIC) {
ClassicGroup classicGroup = (ClassicGroup) group;
if (classicGroup.isInState(ClassicGroupState.STABLE) || classicGroup.isInState(ClassicGroupState.PREPARING_REBALANCE)) {
groupMetadataManager.rescheduleClassicGroupMemberHeartbeat(
classicGroup,
classicGroup.member(request.memberId())
);
}
}
final OffsetCommitResponseData response = new OffsetCommitResponseData();
final List<CoordinatorRecord> records = new ArrayList<>();
final long currentTimeMs = time.milliseconds();
final OptionalLong expireTimestampMs = expireTimestampMs(request.retentionTimeMs(), currentTimeMs);
request.topics().forEach(topic -> {
final OffsetCommitResponseTopic topicResponse = new OffsetCommitResponseTopic().setName(topic.name());
response.topics().add(topicResponse);
topic.partitions().forEach(partition -> {
if (isMetadataInvalid(partition.committedMetadata())) {
topicResponse.partitions().add(new OffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code()));
} else {
log.debug("[GroupId {}] Committing offsets {} for partition {}-{} from member {} with leader epoch {}.",
request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(),
request.memberId(), partition.committedLeaderEpoch());
topicResponse.partitions().add(new OffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.NONE.code()));
final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest(
partition,
currentTimeMs,
expireTimestampMs
);
records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
request.groupId(),
topic.name(),
partition.partitionIndex(),
offsetAndMetadata,
metadataImage.features().metadataVersion()
));
}
});
});
if (!records.isEmpty()) {
metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size());
}
return new CoordinatorResult<>(records, response);
} | @Test
public void testSimpleGroupOffsetCommitWithInstanceId() {
OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build();
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = context.commitOffset(
new OffsetCommitRequestData()
.setGroupId("foo")
// Instance id should be ignored.
.setGroupInstanceId("instance-id")
.setTopics(Collections.singletonList(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
))
))
);
assertEquals(
new OffsetCommitResponseData()
.setTopics(Collections.singletonList(
new OffsetCommitResponseData.OffsetCommitResponseTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
new OffsetCommitResponseData.OffsetCommitResponsePartition()
.setPartitionIndex(0)
.setErrorCode(Errors.NONE.code())
))
)),
result.response()
);
assertEquals(
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
"foo",
"bar",
0,
new OffsetAndMetadata(
100L,
OptionalInt.empty(),
"",
context.time.milliseconds(),
OptionalLong.empty()
),
MetadataImage.EMPTY.features().metadataVersion()
)),
result.records()
);
} |
@Override
public EntryEventType getEventType() {
throw new UnsupportedOperationException();
} | @Test(expected = UnsupportedOperationException.class)
public void testGetEventType() {
singleIMapEvent.getEventType();
} |
public static String normalizeUri(String uri) throws URISyntaxException {
// try to parse using the simpler and faster Camel URI parser
String[] parts = CamelURIParser.fastParseUri(uri);
if (parts != null) {
// special optimization: a sentinel array is returned when the URI is already normalized
if (parts == URI_ALREADY_NORMALIZED) {
return uri;
}
// use the faster and more simple normalizer
return doFastNormalizeUri(parts);
} else {
// use the legacy normalizer as the uri is complex and may have unsafe URL characters
return doComplexNormalizeUri(uri);
}
} | @Test
public void testNormalizeEndpointWithPercentSignInParameter() throws Exception {
String out = URISupport.normalizeUri("http://someendpoint?username=james&password=%25test");
assertNotNull(out);
// Camel will safely encode the URI
assertEquals("http://someendpoint?password=%25test&username=james", out);
} |
public void writeTo(T object, DataWriter writer) throws IOException {
writeTo(object, 0, writer);
} | @Test
public void testNotExportedBean() throws IOException {
ExportConfig config = new ExportConfig().withFlavor(Flavor.JSON).withExportInterceptor(new ExportInterceptor1()).withSkipIfFail(true);
StringWriter writer = new StringWriter();
ExportableBean b = new ExportableBean();
builder.get(ExportableBean.class).writeTo(b,Flavor.JSON.createDataWriter(b, writer, config));
Assert.assertEquals("{\"_class\":\""+ExportableBean.class.getName()+"\",\"name\":\"property1\",\"notExportedBean\":{},\"shouldBeNull\":null}",
writer.toString());
} |
@Override
public boolean add(final Integer value) {
return add(value.intValue());
} | @Test
public void worksCorrectlyWhenFull() {
final IntHashSet set = new IntHashSet(2, 0);
set.add(1);
set.add(2);
assertContains(set, 2);
assertNotContains(set, 3);
} |
@Override
public String version() {
return AppInfoParser.getVersion();
} | @Test
public void testValueToKeyVersionRetrievedFromAppInfoParser() {
assertEquals(AppInfoParser.getVersion(), xform.version());
} |
@Override
public PortNumber decode(int value) {
return PortNumber.portNumber(value);
} | @Test
public void testDecode() {
assertThat(sut.decode(100), is(PortNumber.portNumber(100)));
} |
public static AvroGenericCoder of(Schema schema) {
return AvroGenericCoder.of(schema);
} | @Test
public void testDeterministicNonDeterministicChild() {
// Superclass has non-deterministic fields.
assertNonDeterministic(
AvroCoder.of(SubclassOfUnorderedMapClass.class),
reasonField(UnorderedMapClass.class, "mapField", "may not be deterministically ordered"));
} |
public static SchemaAndValue parseString(String value) {
if (value == null) {
return NULL_SCHEMA_AND_VALUE;
}
if (value.isEmpty()) {
return new SchemaAndValue(Schema.STRING_SCHEMA, value);
}
ValueParser parser = new ValueParser(new Parser(value));
return parser.parse(false);
} | @Test
public void shouldParseTimestampStringAsTimestampInArray() throws Exception {
String tsStr = "2019-08-23T14:34:54.346Z";
String arrayStr = "[" + tsStr + "]";
SchemaAndValue result = Values.parseString(arrayStr);
assertEquals(Type.ARRAY, result.schema().type());
Schema elementSchema = result.schema().valueSchema();
assertEquals(Type.INT64, elementSchema.type());
assertEquals(Timestamp.LOGICAL_NAME, elementSchema.name());
java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr);
assertEquals(Collections.singletonList(expected), result.value());
} |
public CoordinatorResult<TxnOffsetCommitResponseData, CoordinatorRecord> commitTransactionalOffset(
RequestContext context,
TxnOffsetCommitRequestData request
) throws ApiException {
validateTransactionalOffsetCommit(context, request);
final TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData();
final List<CoordinatorRecord> records = new ArrayList<>();
final long currentTimeMs = time.milliseconds();
request.topics().forEach(topic -> {
final TxnOffsetCommitResponseTopic topicResponse = new TxnOffsetCommitResponseTopic().setName(topic.name());
response.topics().add(topicResponse);
topic.partitions().forEach(partition -> {
if (isMetadataInvalid(partition.committedMetadata())) {
topicResponse.partitions().add(new TxnOffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code()));
} else {
log.debug("[GroupId {}] Committing transactional offsets {} for partition {}-{} from member {} with leader epoch {}.",
request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(),
request.memberId(), partition.committedLeaderEpoch());
topicResponse.partitions().add(new TxnOffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.NONE.code()));
final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest(
partition,
currentTimeMs
);
records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
request.groupId(),
topic.name(),
partition.partitionIndex(),
offsetAndMetadata,
metadataImage.features().metadataVersion()
));
}
});
});
if (!records.isEmpty()) {
metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size());
}
return new CoordinatorResult<>(records, response);
} | @Test
public void testGenericGroupTransactionalOffsetCommitWithIllegalGenerationId() {
OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build();
// Create a group.
ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup(
"foo",
true
);
// Add member.
ClassicGroupMember member = mkGenericMember("member", Optional.empty());
group.add(member);
// Transition to next generation.
group.transitionTo(ClassicGroupState.PREPARING_REBALANCE);
group.initNextGeneration();
assertEquals(1, group.generationId());
group.transitionTo(ClassicGroupState.STABLE);
assertThrows(IllegalGenerationException.class, () -> context.commitTransactionalOffset(
new TxnOffsetCommitRequestData()
.setGroupId("foo")
.setMemberId("member")
.setGenerationId(100)
.setTopics(Collections.singletonList(
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
.setCommittedLeaderEpoch(10)
.setCommittedMetadata("metadata")
))
))
));
} |
@Override
public OpenstackNode updateIntbridge(DeviceId newIntgBridge) {
return new Builder()
.type(type)
.hostname(hostname)
.intgBridge(newIntgBridge)
.managementIp(managementIp)
.dataIp(dataIp)
.vlanIntf(vlanIntf)
.uplinkPort(uplinkPort)
.state(state)
.phyIntfs(phyIntfs)
.sshAuthInfo(sshAuth)
.dpdkConfig(dpdkConfig)
.keystoneConfig(keystoneConfig)
.neutronConfig(neutronConfig)
.controllers(controllers)
.build();
} | @Test
public void testUpdateIntBridge() {
OpenstackNode updatedNode = refNode.updateIntbridge(DeviceId.deviceId("br-tun"));
checkCommonProperties(updatedNode);
assertEquals(updatedNode.intgBridge(), DeviceId.deviceId("br-tun"));
} |
@Override
public CompletableFuture<Void> cleanupAsync(JobID jobId) {
mainThreadExecutor.assertRunningInMainThread();
CompletableFuture<Void> cleanupFuture = FutureUtils.completedVoidFuture();
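// Chain the prioritized cleanups strictly one after another; the regular cleanups below run concurrently.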
for (CleanupWithLabel<T> cleanupWithLabel : prioritizedCleanup) {
cleanupFuture =
cleanupFuture.thenCompose(
ignoredValue ->
withRetry(
jobId,
cleanupWithLabel.getLabel(),
cleanupWithLabel.getCleanup()));
}
return cleanupFuture.thenCompose(
ignoredValue ->
FutureUtils.completeAll(
regularCleanup.stream()
.map(
cleanupWithLabel ->
withRetry(
jobId,
cleanupWithLabel.getLabel(),
cleanupWithLabel.getCleanup()))
.collect(Collectors.toList())));
} | @Test
void testSuccessfulConcurrentCleanup() {
final SingleCallCleanup cleanup0 = SingleCallCleanup.withoutCompletionOnCleanup();
final SingleCallCleanup cleanup1 = SingleCallCleanup.withoutCompletionOnCleanup();
final CompletableFuture<Void> cleanupResult =
createTestInstanceBuilder()
.withRegularCleanup("Reg #0", cleanup0)
.withRegularCleanup("Reg #1", cleanup1)
.build()
.cleanupAsync(JOB_ID);
assertThat(cleanupResult).isNotCompleted();
assertThat(cleanup0).extracting(SingleCallCleanup::getProcessedJobId).isEqualTo(JOB_ID);
assertThat(cleanup1).extracting(SingleCallCleanup::getProcessedJobId).isEqualTo(JOB_ID);
cleanup0.completeCleanup();
assertThat(cleanupResult).isNotCompleted();
cleanup1.completeCleanup();
assertThat(cleanupResult).isCompleted();
} |
public static OffsetDateTimeByInstantComparator getInstance() {
return INSTANCE;
} | @Test
void should_have_one_instance() {
assertThat(comparator).isSameAs(OffsetDateTimeByInstantComparator.getInstance());
} |
public XmlStreamInfo information() throws IOException {
if (information.problem != null) {
return information;
}
if (XMLStreamConstants.START_DOCUMENT != reader.getEventType()) {
information.problem = new IllegalStateException("Expected START_DOCUMENT");
return information;
}
boolean skipComments = false;
try {
while (reader.hasNext()) {
int ev = reader.next();
switch (ev) {
case XMLStreamConstants.COMMENT:
if (!skipComments) {
// search for modelines
String comment = reader.getText();
if (comment != null) {
comment.lines().map(String::trim).forEach(l -> {
if (l.startsWith("camel-k:")) {
information.modelines.add(l);
}
});
}
}
break;
case XMLStreamConstants.START_ELEMENT:
if (information.rootElementName != null) {
// only root element is checked. No need to parse more
return information;
}
skipComments = true;
information.rootElementName = reader.getLocalName();
information.rootElementNamespace = reader.getNamespaceURI();
for (int ns = 0; ns < reader.getNamespaceCount(); ns++) {
String prefix = reader.getNamespacePrefix(ns);
information.namespaceMapping.put(prefix == null ? "" : prefix, reader.getNamespaceURI(ns));
}
for (int at = 0; at < reader.getAttributeCount(); at++) {
QName qn = reader.getAttributeName(at);
String prefix = qn.getPrefix() == null ? "" : qn.getPrefix().trim();
String nsURI = qn.getNamespaceURI() == null ? "" : qn.getNamespaceURI().trim();
String value = reader.getAttributeValue(at);
String localPart = qn.getLocalPart();
if (nsURI.isEmpty() || prefix.isEmpty()) {
// according to XML spec, this attribute is not namespaced, not in default namespace
// https://www.w3.org/TR/xml-names/#defaulting
// > The namespace name for an unprefixed attribute name always has no value.
information.attributes.put(localPart, value);
} else {
information.attributes.put("{" + nsURI + "}" + localPart, value);
information.attributes.put(prefix + ":" + localPart, value);
}
}
break;
case XMLStreamConstants.END_ELEMENT:
case XMLStreamConstants.END_DOCUMENT:
if (information.rootElementName == null) {
information.problem = new IllegalArgumentException("XML Stream is empty");
return information;
}
break;
default:
break;
}
}
} catch (XMLStreamException e) {
information.problem = e;
return information;
}
return information;
} | @Test
public void simpleRoute() throws IOException {
String xml = readAllFromFile("simpleRoute.xml");
XmlStreamDetector detector
= new XmlStreamDetector(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
XmlStreamInfo info = detector.information();
assertTrue(info.isValid());
assertEquals("routes", info.getRootElementName());
assertEquals("http://camel.apache.org/schema/spring", info.getRootElementNamespace());
assertEquals(2, info.getAttributes().size());
assertTrue(info.getAttributes().get("xsi:schemaLocation")
.contains("https://camel.apache.org/schema/spring/camel-spring.xsd"));
assertTrue(info.getAttributes().get("{http://www.w3.org/2001/XMLSchema-instance}schemaLocation")
.contains("https://camel.apache.org/schema/spring/camel-spring.xsd"));
assertEquals(2, info.getNamespaces().size());
assertEquals("http://camel.apache.org/schema/spring", info.getNamespaces().get(""));
assertEquals("http://www.w3.org/2001/XMLSchema-instance", info.getNamespaces().get("xsi"));
} |
static Integer findNextUnusedIdFromGap(NavigableSet<Integer> setWithGaps) {
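// Linear scan from 0: returns the smallest non-negative id that is absent from the set.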
int next = 0;
while (setWithGaps.contains(next)) {
next++;
}
return next;
} | @Test
public void testFindingIdsInGaps() {
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 2))), is(1));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 2, 3, 4, 5))), is(1));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 1, 3, 4, 5))), is(2));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 1, 2, 4, 5))), is(3));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 1, 2, 3, 5))), is(4));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 3, 4, 5))), is(1));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 4, 5))), is(1));
assertThat(NodeIdAssignor.findNextUnusedIdFromGap(new TreeSet<>(Set.of(0, 5))), is(1));
} |
public void addEntry(WhitelistEntry entry) {
final UrlWhitelist modified = addEntry(getWhitelist(), entry);
saveWhitelist(modified);
} | @Test
public void addEntry() {
final WhitelistEntry existingEntry = LiteralWhitelistEntry.create("a", "a", "a");
final UrlWhitelist existingWhitelist = UrlWhitelist.createEnabled(Collections.singletonList(existingEntry));
final WhitelistEntry newEntry = LiteralWhitelistEntry.create("b", "b", "b");
final UrlWhitelist whitelistWithEntryAdded = urlWhitelistService.addEntry(existingWhitelist, newEntry);
assertThat(whitelistWithEntryAdded).isEqualTo(
UrlWhitelist.createEnabled(ImmutableList.of(existingEntry, newEntry)));
final WhitelistEntry replacedEntry = LiteralWhitelistEntry.create("a", "c", "c");
final UrlWhitelist whitelistWithEntryReplaced =
urlWhitelistService.addEntry(whitelistWithEntryAdded, replacedEntry);
assertThat(whitelistWithEntryReplaced).isEqualTo(
UrlWhitelist.createEnabled(ImmutableList.of(replacedEntry, newEntry)));
} |
@Override
public void removeRule(final RuleData ruleData) {
Optional.ofNullable(ruleData.getHandle()).ifPresent(s -> CACHED_HANDLE.get().removeHandle(CacheKeyUtils.INST.getKey(ruleData)));
} | @Test
public void removeRuleTest() {
contextPathPluginDataHandler.removeRule(RuleData.builder().handle("{}").build());
} |
public static MetadataExtractor create(String metadataClassName) {
String metadataExtractorClassName = metadataClassName;
try {
LOGGER.info("Instantiating MetadataExtractor class {}", metadataExtractorClassName);
MetadataExtractor metadataExtractor = (MetadataExtractor) Class.forName(metadataExtractorClassName).newInstance();
return metadataExtractor;
} catch (Exception e) {
LOGGER.warn("No metadata extractor class passed in, using default");
return new DefaultMetadataExtractor();
}
} | @Test
public void testDefaultMetadataProvider() {
Assert.assertTrue(MetadataExtractorFactory.create(null) instanceof DefaultMetadataExtractor);
} |
protected static boolean startMarketActivity(
@NonNull Context context, @NonNull String marketKeyword) {
try {
Intent search = new Intent(Intent.ACTION_VIEW);
Uri uri =
new Uri.Builder()
.scheme("market")
.authority("search")
.appendQueryParameter("q", "AnySoftKeyboard " + marketKeyword)
.build();
search.setData(uri);
search.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(search);
} catch (Exception ex) {
Logger.e(TAG, "Could not launch Store search!", ex);
return false;
}
return true;
} | @Test
public void testUtilityNoMarketError() {
Application context = ApplicationProvider.getApplicationContext();
Assert.assertTrue(AddOnStoreSearchController.startMarketActivity(context, "play"));
var intent = Shadows.shadowOf(context).getNextStartedActivity();
Assert.assertEquals(Intent.ACTION_VIEW, intent.getAction());
Assert.assertEquals(Uri.parse("market://search?q=AnySoftKeyboard%20play"), intent.getData());
} |
public Map<String, MinionEventObserver> getMinionEventObserverWithGivenState(MinionTaskState taskState) {
return _taskEventObservers.entrySet().stream()
.filter(e -> e.getValue().getTaskState() == taskState)
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
} | @Test
public void testGetMinionEventObserverWithGivenState() {
MinionEventObserver observer1 = new MinionProgressObserver();
observer1.notifyTaskStart(null);
MinionEventObservers.getInstance().addMinionEventObserver("t01", observer1);
MinionEventObserver observer2 = new MinionProgressObserver();
observer2.notifyProgress(null, "");
MinionEventObservers.getInstance().addMinionEventObserver("t02", observer2);
MinionEventObserver observer3 = new MinionProgressObserver();
observer3.notifyTaskSuccess(null, "");
MinionEventObservers.getInstance().addMinionEventObserver("t03", observer3);
Map<String, MinionEventObserver> minionEventObserverWithGivenState =
MinionEventObservers.getInstance().getMinionEventObserverWithGivenState(MinionTaskState.IN_PROGRESS);
assertEquals(minionEventObserverWithGivenState.size(), 2);
assertSame(minionEventObserverWithGivenState.get("t01"), observer1);
assertSame(minionEventObserverWithGivenState.get("t02"), observer2);
} |
public static <T> Write<T> write(String jdbcUrl, String table) {
return new AutoValue_ClickHouseIO_Write.Builder<T>()
.jdbcUrl(jdbcUrl)
.table(table)
.properties(new Properties())
.maxInsertBlockSize(DEFAULT_MAX_INSERT_BLOCK_SIZE)
.initialBackoff(DEFAULT_INITIAL_BACKOFF)
.maxRetries(DEFAULT_MAX_RETRIES)
.maxCumulativeBackoff(DEFAULT_MAX_CUMULATIVE_BACKOFF)
.build()
.withInsertDeduplicate(true)
.withInsertDistributedSync(true);
} | @Test
public void testInt64WithDefault() throws Exception {
Schema schema = Schema.of(Schema.Field.nullable("f0", FieldType.INT64));
Row row1 = Row.withSchema(schema).addValue(1L).build();
Row row2 = Row.withSchema(schema).addValue(null).build();
Row row3 = Row.withSchema(schema).addValue(3L).build();
executeSql("CREATE TABLE test_int64_with_default (f0 Int64 DEFAULT -1) ENGINE=Log");
pipeline
.apply(Create.of(row1, row2, row3).withRowSchema(schema))
.apply(write("test_int64_with_default"));
pipeline.run().waitUntilFinish();
long sum = executeQueryAsLong("SELECT SUM(f0) FROM test_int64_with_default");
assertEquals(3L, sum);
} |
public long minOffset(MessageQueue mq) throws MQClientException {
String brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(this.mQClientFactory.getBrokerNameFromMessageQueue(mq));
if (null == brokerAddr) {
this.mQClientFactory.updateTopicRouteInfoFromNameServer(mq.getTopic());
brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(this.mQClientFactory.getBrokerNameFromMessageQueue(mq));
}
if (brokerAddr != null) {
try {
return this.mQClientFactory.getMQClientAPIImpl().getMinOffset(brokerAddr, mq, timeoutMillis);
} catch (Exception e) {
throw new MQClientException("Invoke Broker[" + brokerAddr + "] exception", e);
}
}
throw new MQClientException("The broker[" + mq.getBrokerName() + "] not exist", null);
} | @Test
public void assertMinOffset() throws MQClientException {
assertEquals(0, mqAdminImpl.minOffset(new MessageQueue()));
} |
public FEELFnResult<Boolean> invoke(@ParameterName( "list" ) List list) {
if ( list == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
}
boolean result = false;
boolean containsNull = false;
// Spec. definition: return true if any item is true, else false if all items are false, else null
for ( final Object element : list ) {
if (element != null && !(element instanceof Boolean)) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean"));
} else {
if (element != null) {
result |= (Boolean) element;
} else if (!containsNull) {
containsNull = true;
}
}
}
if (containsNull && !result) {
return FEELFnResult.ofResult( null );
} else {
return FEELFnResult.ofResult( result );
}
} | @Test
void invokeArrayParamReturnTrue() {
FunctionTestUtil.assertResult(anyFunction.invoke(new Object[]{Boolean.TRUE, Boolean.TRUE}), true);
FunctionTestUtil.assertResult(anyFunction.invoke(new Object[]{Boolean.TRUE, Boolean.FALSE}), true);
FunctionTestUtil.assertResult(anyFunction.invoke(new Object[]{Boolean.TRUE, null}), true);
FunctionTestUtil.assertResult(anyFunction.invoke(new Object[]{Boolean.TRUE, null, Boolean.FALSE}), true);
} |
static File getQualifiedBinInner(File hadoopHomeDir, String executable)
throws FileNotFoundException {
String binDirText = "Hadoop bin directory ";
File bin = new File(hadoopHomeDir, "bin");
if (!bin.exists()) {
throw new FileNotFoundException(addOsText(binDirText + E_DOES_NOT_EXIST
+ ": " + bin));
}
if (!bin.isDirectory()) {
throw new FileNotFoundException(addOsText(binDirText + E_NOT_DIRECTORY
+ ": " + bin));
}
File exeFile = new File(bin, executable);
if (!exeFile.exists()) {
throw new FileNotFoundException(
addOsText(E_NO_EXECUTABLE + ": " + exeFile));
}
if (!exeFile.isFile()) {
throw new FileNotFoundException(
addOsText(E_NOT_EXECUTABLE_FILE + ": " + exeFile));
}
try {
return exeFile.getCanonicalFile();
} catch (IOException e) {
// this isn't going to happen, because of all the upfront checks.
// so if it does, it gets converted to a FNFE and rethrown
throw fileNotFoundException(e.toString(), e);
}
} | @Test
public void testBinWinUtilsFound() throws Throwable {
try {
File bin = new File(methodDir, "bin");
File winutils = new File(bin, WINUTILS_EXE);
touch(winutils);
assertEquals(winutils.getCanonicalPath(),
getQualifiedBinInner(methodDir, WINUTILS_EXE).getCanonicalPath());
} finally {
FileUtils.deleteQuietly(methodDir);
}
} |
@SuppressWarnings({"deprecation", "checkstyle:linelength"})
public void convertSiteProperties(Configuration conf,
Configuration yarnSiteConfig, boolean drfUsed,
boolean enableAsyncScheduler, boolean userPercentage,
FSConfigToCSConfigConverterParams.PreemptionMode preemptionMode) {
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER,
CapacityScheduler.class.getCanonicalName());
if (conf.getBoolean(
FairSchedulerConfiguration.CONTINUOUS_SCHEDULING_ENABLED,
FairSchedulerConfiguration.DEFAULT_CONTINUOUS_SCHEDULING_ENABLED)) {
yarnSiteConfig.setBoolean(
CapacitySchedulerConfiguration.SCHEDULE_ASYNCHRONOUSLY_ENABLE, true);
int interval = conf.getInt(
FairSchedulerConfiguration.CONTINUOUS_SCHEDULING_SLEEP_MS,
FairSchedulerConfiguration.DEFAULT_CONTINUOUS_SCHEDULING_SLEEP_MS);
yarnSiteConfig.setInt(PREFIX +
"schedule-asynchronously.scheduling-interval-ms", interval);
}
// This should always be true to trigger CS auto
// queue refresh.
yarnSiteConfig.setBoolean(
YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true);
if (conf.getBoolean(FairSchedulerConfiguration.PREEMPTION,
FairSchedulerConfiguration.DEFAULT_PREEMPTION)) {
preemptionEnabled = true;
String policies = addMonitorPolicy(ProportionalCapacityPreemptionPolicy.
class.getCanonicalName(), yarnSiteConfig);
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
policies);
int waitTimeBeforeKill = conf.getInt(
FairSchedulerConfiguration.WAIT_TIME_BEFORE_KILL,
FairSchedulerConfiguration.DEFAULT_WAIT_TIME_BEFORE_KILL);
yarnSiteConfig.setInt(
CapacitySchedulerConfiguration.PREEMPTION_WAIT_TIME_BEFORE_KILL,
waitTimeBeforeKill);
long waitBeforeNextStarvationCheck = conf.getLong(
FairSchedulerConfiguration.WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS,
FairSchedulerConfiguration.DEFAULT_WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS);
yarnSiteConfig.setLong(
CapacitySchedulerConfiguration.PREEMPTION_MONITORING_INTERVAL,
waitBeforeNextStarvationCheck);
} else {
if (preemptionMode ==
FSConfigToCSConfigConverterParams.PreemptionMode.NO_POLICY) {
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES, "");
}
}
// For auto-created queues' auto deletion.
if (!userPercentage) {
String policies = addMonitorPolicy(AutoCreatedQueueDeletionPolicy.
class.getCanonicalName(), yarnSiteConfig);
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
policies);
// Set the expiry-for-deletion interval to 10s, consistent with FS.
yarnSiteConfig.setInt(CapacitySchedulerConfiguration.
AUTO_CREATE_CHILD_QUEUE_EXPIRED_TIME, 10);
}
if (conf.getBoolean(FairSchedulerConfiguration.ASSIGN_MULTIPLE,
FairSchedulerConfiguration.DEFAULT_ASSIGN_MULTIPLE)) {
yarnSiteConfig.setBoolean(
CapacitySchedulerConfiguration.ASSIGN_MULTIPLE_ENABLED, true);
} else {
yarnSiteConfig.setBoolean(
CapacitySchedulerConfiguration.ASSIGN_MULTIPLE_ENABLED, false);
}
// Make auto cs conf refresh enabled.
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
addMonitorPolicy(QueueConfigurationAutoRefreshPolicy
.class.getCanonicalName(), yarnSiteConfig));
int maxAssign = conf.getInt(FairSchedulerConfiguration.MAX_ASSIGN,
FairSchedulerConfiguration.DEFAULT_MAX_ASSIGN);
if (maxAssign != FairSchedulerConfiguration.DEFAULT_MAX_ASSIGN) {
yarnSiteConfig.setInt(
CapacitySchedulerConfiguration.MAX_ASSIGN_PER_HEARTBEAT,
maxAssign);
}
float localityThresholdNode = conf.getFloat(
FairSchedulerConfiguration.LOCALITY_THRESHOLD_NODE,
FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_NODE);
if (localityThresholdNode !=
FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_NODE) {
yarnSiteConfig.setFloat(CapacitySchedulerConfiguration.NODE_LOCALITY_DELAY,
localityThresholdNode);
}
float localityThresholdRack = conf.getFloat(
FairSchedulerConfiguration.LOCALITY_THRESHOLD_RACK,
FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_RACK);
if (localityThresholdRack !=
FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_RACK) {
yarnSiteConfig.setFloat(
CapacitySchedulerConfiguration.RACK_LOCALITY_ADDITIONAL_DELAY,
localityThresholdRack);
}
if (conf.getBoolean(FairSchedulerConfiguration.SIZE_BASED_WEIGHT,
FairSchedulerConfiguration.DEFAULT_SIZE_BASED_WEIGHT)) {
sizeBasedWeight = true;
}
if (drfUsed) {
yarnSiteConfig.set(
CapacitySchedulerConfiguration.RESOURCE_CALCULATOR_CLASS,
DominantResourceCalculator.class.getCanonicalName());
}
if (enableAsyncScheduler) {
yarnSiteConfig.setBoolean(CapacitySchedulerConfiguration.SCHEDULE_ASYNCHRONOUSLY_ENABLE, true);
}
} | @Test
public void testSiteAssignMultipleConversion() {
yarnConfig.setBoolean(FairSchedulerConfiguration.ASSIGN_MULTIPLE, true);
converter.convertSiteProperties(yarnConfig, yarnConvertedConfig, false,
false, false, null);
assertTrue("Assign multiple",
yarnConvertedConfig.getBoolean(
CapacitySchedulerConfiguration.ASSIGN_MULTIPLE_ENABLED,
false));
} |
@Override
public MySQLPacket getQueryRowPacket() throws SQLException {
return new MySQLTextResultSetRowPacket(proxyBackendHandler.getRowData().getData());
} | @Test
void assertGetQueryRowPacket() throws SQLException {
assertThat(new MySQLComQueryPacketExecutor(packet, connectionSession).getQueryRowPacket(), instanceOf(MySQLTextResultSetRowPacket.class));
} |
@Override
public GroupVersion groupVersion() {
return PublicApiUtils.groupVersion(new Category());
} | @Test
void groupVersion() {
GroupVersion groupVersion = endpoint.groupVersion();
assertThat(groupVersion.toString()).isEqualTo("api.content.halo.run/v1alpha1");
} |
public DicomWebPath parseDicomWebpath(String unparsedWebpath) throws IOException {
String[] webPathSplit = unparsedWebpath.split("/dicomWeb/");
if (webPathSplit.length != 2) {
throw new IOException("Invalid DICOM web path");
}
DicomWebPath dicomWebPath = new DicomWebPath();
dicomWebPath.dicomStorePath = webPathSplit[0];
String[] storePathElements = dicomWebPath.dicomStorePath.split("/");
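// The store path alternates keyword/value segments, so project, location, dataset and store id sit at indexes 1, 3, 5 and 7.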
dicomWebPath.project = storePathElements[1];
dicomWebPath.location = storePathElements[3];
dicomWebPath.dataset = storePathElements[5];
dicomWebPath.storeId = storePathElements[7];
String[] searchParameters;
searchParameters = webPathSplit[1].split("/");
if (searchParameters.length < 2) {
throw new IOException("Invalid DICOM web path");
}
dicomWebPath.studyId = searchParameters[1];
dicomWebPath.seriesId = searchParameters[3];
dicomWebPath.instanceId = searchParameters[5];
return dicomWebPath;
} | @Test
public void test_parsedAllElements() throws IOException {
String webpathStr =
"projects/foo/location/earth/datasets/bar/dicomStores/fee/dicomWeb/studies/abc/series/xyz/instances/123";
WebPathParser parser = new WebPathParser();
WebPathParser.DicomWebPath dicomWebPath = parser.parseDicomWebpath(webpathStr);
Assert.assertNotNull(dicomWebPath);
Assert.assertEquals("foo", dicomWebPath.project);
Assert.assertEquals("earth", dicomWebPath.location);
Assert.assertEquals("bar", dicomWebPath.dataset);
Assert.assertEquals("fee", dicomWebPath.storeId);
Assert.assertEquals("abc", dicomWebPath.studyId);
Assert.assertEquals("xyz", dicomWebPath.seriesId);
Assert.assertEquals("123", dicomWebPath.instanceId);
Assert.assertEquals(
"projects/foo/location/earth/datasets/bar/dicomStores/fee", dicomWebPath.dicomStorePath);
} |
@Override
public V put(K key, V value, Duration ttl) {
return get(putAsync(key, value, ttl));
} | @Test
public void testReadAllEntrySet() throws InterruptedException {
RMapCacheNative<Integer, String> map = redisson.getMapCacheNative("simple12");
map.put(1, "12");
map.put(2, "33", Duration.ofMinutes(10));
map.put(3, "43");
assertThat(map.readAllEntrySet()).isEqualTo(map.entrySet());
map.destroy();
} |
Record convert(Object data) {
return convert(data, null);
} | @Test
public void testNestedMapConvert() {
Table table = mock(Table.class);
when(table.schema()).thenReturn(NESTED_SCHEMA);
RecordConverter converter = new RecordConverter(table, config);
Map<String, Object> nestedData = createNestedMapData();
Record record = converter.convert(nestedData);
assertNestedRecordValues(record);
} |
public static Collection<java.nio.file.Path> listFilesInDirectory(
final java.nio.file.Path directory, final Predicate<java.nio.file.Path> fileFilter)
throws IOException {
checkNotNull(directory, "directory");
checkNotNull(fileFilter, "fileFilter");
if (!Files.exists(directory)) {
throw new IllegalArgumentException(
String.format("The directory %s dose not exist.", directory));
}
if (!Files.isDirectory(directory)) {
throw new IllegalArgumentException(
String.format("The %s is not a directory.", directory));
}
final FilterFileVisitor filterFileVisitor = new FilterFileVisitor(fileFilter);
Files.walkFileTree(
directory,
EnumSet.of(FileVisitOption.FOLLOW_LINKS),
Integer.MAX_VALUE,
filterFileVisitor);
return filterFileVisitor.getFiles();
} | @Test
void testListAFileFailsBecauseDirectoryIsExpected() throws IOException {
final String fileName = "a.jar";
final File file = TempDirUtils.newFile(temporaryFolder, fileName);
assertThatThrownBy(
() -> FileUtils.listFilesInDirectory(file.toPath(), FileUtils::isJarFile))
.isInstanceOf(IllegalArgumentException.class);
} |
public void setTarget(LogOutput target) {
this.target = target;
} | @Test
public void testChangeTarget() {
listener = mock(LogOutput.class);
appender.setTarget(listener);
testLevelTranslation();
} |
Map<TaskId, Task> allOwnedTasks() {
// not bothering with an unmodifiable map, since the tasks themselves are mutable, but
// if any outside code modifies the map or the tasks, it would be a severe transgression.
return tasks.allTasksPerId();
} | @Test
public void shouldNotReturnStateUpdaterTasksInOwnedTasks() {
final StreamTask activeTask = statefulTask(taskId03, taskId03ChangelogPartitions)
.inState(State.RUNNING)
.withInputPartitions(taskId03Partitions).build();
final StandbyTask standbyTask = standbyTask(taskId02, taskId02ChangelogPartitions)
.inState(State.RUNNING)
.withInputPartitions(taskId02Partitions).build();
final TasksRegistry tasks = mock(TasksRegistry.class);
final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
when(tasks.allTasksPerId()).thenReturn(mkMap(mkEntry(taskId03, activeTask)));
assertEquals(taskManager.allOwnedTasks(), mkMap(mkEntry(taskId03, activeTask)));
} |
@Override
public ParsedSchema toParsedSchema(final PersistenceSchema schema) {
SerdeUtils.throwOnUnsupportedFeatures(schema.features(), format.supportedFeatures());
final ConnectSchema outerSchema = ConnectSchemas.columnsToConnectSchema(schema.columns());
final ConnectSchema innerSchema = SerdeUtils
.applySinglesUnwrapping(outerSchema, schema.features());
return connectSrTranslator.fromConnectSchema(innerSchema);
} | @Test
public void shouldSupportBuildingPrimitiveSchemas() {
// Given:
when(format.supportedFeatures()).thenReturn(ImmutableSet.of(SerdeFeature.UNWRAP_SINGLES));
// When:
translator.toParsedSchema(
PersistenceSchema.from(
ImmutableList.of(createColumn("bob", SqlTypes.INTEGER)),
SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES)
)
);
// Then:
verify(innerTranslator).fromConnectSchema(SchemaBuilder.int32().optional().build());
} |
@Implementation
public static synchronized String getTokenWithNotification(
Context context, Account account, String scope, Bundle extras)
throws IOException, UserRecoverableNotifiedException, GoogleAuthException {
return googleAuthUtilImpl.getTokenWithNotification(context, account, scope, extras);
} | @Test
public void getTokenWithNotification_nullCallBackThrowIllegalArgumentException()
throws Exception {
thrown.expect(IllegalArgumentException.class);
GoogleAuthUtil.getTokenWithNotification(
RuntimeEnvironment.getApplication(), "name", "scope", null, null);
} |
public static Permission getPermission(String name, String serviceName, String... actions) {
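// Look up the permission factory registered for the service and delegate creation to it.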
PermissionFactory permissionFactory = PERMISSION_FACTORY_MAP.get(serviceName);
if (permissionFactory == null) {
throw new IllegalArgumentException("No permissions found for service: " + serviceName);
}
return permissionFactory.create(name, actions);
} | @Test
public void getPermission_Map() {
Permission permission = ActionConstants.getPermission("foo", MapService.SERVICE_NAME);
assertNotNull(permission);
assertTrue(permission instanceof MapPermission);
} |
public IndexRecord getIndexInformation(String mapId, int reduce,
Path fileName, String expectedIndexOwner)
throws IOException {
IndexInformation info = cache.get(mapId);
if (info == null) {
info = readIndexFileToCache(fileName, mapId, expectedIndexOwner);
} else {
synchronized(info) {
while (isUnderConstruction(info)) {
try {
info.wait();
} catch (InterruptedException e) {
throw new IOException("Interrupted waiting for construction", e);
}
}
}
LOG.debug("IndexCache HIT: MapId " + mapId + " found");
}
if (info.mapSpillRecord.size() == 0 ||
info.mapSpillRecord.size() <= reduce) {
throw new IOException("Invalid request " +
" Map Id = " + mapId + " Reducer = " + reduce +
" Index Info Length = " + info.mapSpillRecord.size());
}
return info.mapSpillRecord.getIndex(reduce);
} | @Test
public void testInvalidReduceNumberOrLength() throws Exception {
fs.delete(p, true);
conf.setInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 1);
final int partsPerMap = 1000;
final int bytesPerFile = partsPerMap * 24;
IndexCache cache = new IndexCache(conf);
// fill cache
Path feq = new Path(p, "invalidReduceOrPartsPerMap");
writeFile(fs, feq, bytesPerFile, partsPerMap);
// Number of reducers should always be less than partsPerMap as reducer
// numbers start from 0 and there cannot be more reducers than parts
try {
// Number of reducers equal to partsPerMap
cache.getIndexInformation("reduceEqualPartsPerMap",
partsPerMap, // reduce number == partsPerMap
feq, UserGroupInformation.getCurrentUser().getShortUserName());
fail("Number of reducers equal to partsPerMap did not fail");
} catch (Exception e) {
if (!(e instanceof IOException)) {
throw e;
}
}
try {
// Number of reducers more than partsPerMap
cache.getIndexInformation(
"reduceMorePartsPerMap",
partsPerMap + 1, // reduce number > partsPerMap
feq, UserGroupInformation.getCurrentUser().getShortUserName());
fail("Number of reducers more than partsPerMap did not fail");
} catch (Exception e) {
if (!(e instanceof IOException)) {
throw e;
}
}
} |
@Override
protected VertexFlameGraph handleRequest(
HandlerRequest<EmptyRequestBody> request, AccessExecutionJobVertex jobVertex)
throws RestHandlerException {
@Nullable Integer subtaskIndex = getSubtaskIndex(request, jobVertex);
if (isTerminated(jobVertex, subtaskIndex)) {
return VertexFlameGraph.terminated();
}
final Optional<VertexThreadInfoStats> threadInfoSample;
if (subtaskIndex == null) {
threadInfoSample =
threadInfoOperatorTracker.getJobVertexStats(
request.getPathParameter(JobIDPathParameter.class), jobVertex);
} else {
threadInfoSample =
threadInfoOperatorTracker.getExecutionVertexStats(
request.getPathParameter(JobIDPathParameter.class),
jobVertex,
subtaskIndex);
}
final FlameGraphTypeQueryParameter.Type flameGraphType = getFlameGraphType(request);
final Optional<VertexFlameGraph> operatorFlameGraph;
switch (flameGraphType) {
case FULL:
operatorFlameGraph =
threadInfoSample.map(VertexFlameGraphFactory::createFullFlameGraphFrom);
break;
case ON_CPU:
operatorFlameGraph =
threadInfoSample.map(VertexFlameGraphFactory::createOnCpuFlameGraph);
break;
case OFF_CPU:
operatorFlameGraph =
threadInfoSample.map(VertexFlameGraphFactory::createOffCpuFlameGraph);
break;
default:
throw new RestHandlerException(
"Unknown Flame Graph type " + flameGraphType + '.',
HttpResponseStatus.BAD_REQUEST);
}
return operatorFlameGraph.orElse(VertexFlameGraph.waiting());
} | @Test
void testHandleMixedSubtasks() throws Exception {
final ArchivedExecutionJobVertex archivedExecutionJobVertex =
new ArchivedExecutionJobVertex(
new ArchivedExecutionVertex[] {
generateExecutionVertex(0, ExecutionState.FINISHED),
generateExecutionVertex(1, ExecutionState.RUNNING)
},
JOB_VERTEX_ID,
"test",
2,
2,
new SlotSharingGroup(),
ResourceProfile.UNKNOWN,
new StringifiedAccumulatorResult[0]);
// Check the finished subtask
HandlerRequest<EmptyRequestBody> request = generateJobVertexFlameGraphParameters(0);
VertexFlameGraph jobVertexFlameGraph =
handler.handleRequest(request, archivedExecutionJobVertex);
assertThat(jobVertexFlameGraph.getEndTime())
.isEqualTo(VertexFlameGraph.terminated().getEndTime());
// Check the running subtask
request = generateJobVertexFlameGraphParameters(1);
jobVertexFlameGraph = handler.handleRequest(request, archivedExecutionJobVertex);
assertThat(jobVertexFlameGraph.getEndTime())
.isEqualTo(taskThreadInfoStatsDefaultSample.getEndTime());
// Check the job vertex
request = generateJobVertexFlameGraphParameters(null);
jobVertexFlameGraph = handler.handleRequest(request, archivedExecutionJobVertex);
assertThat(jobVertexFlameGraph.getEndTime())
.isEqualTo(taskThreadInfoStatsDefaultSample.getEndTime());
} |
public static Map<String, Object> parseParameters(URI uri) throws URISyntaxException {
String query = prepareQuery(uri);
if (query == null) {
// return an empty map
return new LinkedHashMap<>(0);
}
return parseQuery(query);
} | @Test
public void testParseParameters() throws Exception {
URI u = new URI("quartz:myGroup/myTimerName?cron=0%200%20*%20*%20*%20?");
Map<String, Object> params = URISupport.parseParameters(u);
assertEquals(1, params.size());
assertEquals("0 0 * * * ?", params.get("cron"));
u = new URI("quartz:myGroup/myTimerName?cron=0%200%20*%20*%20*%20?&bar=123");
params = URISupport.parseParameters(u);
assertEquals(2, params.size());
assertEquals("0 0 * * * ?", params.get("cron"));
assertEquals("123", params.get("bar"));
} |
public boolean transitionToFinished()
{
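// Atomically transition to FINISHED unless the stage is already in a terminal state.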
return state.setIf(FINISHED, currentState -> !currentState.isDone());
} | @Test
public void testFinished()
{
StageExecutionStateMachine stateMachine = createStageStateMachine();
assertTrue(stateMachine.transitionToFinished());
assertFinalState(stateMachine, StageExecutionState.FINISHED);
} |
@Override
public void upgrade() {
if (shouldSkip()) {
return;
}
final ImmutableSet<String> eventIndexPrefixes = ImmutableSet.of(
elasticsearchConfig.getDefaultEventsIndexPrefix(),
elasticsearchConfig.getDefaultSystemEventsIndexPrefix());
elasticsearch.addGl2MessageIdFieldAlias(eventIndexPrefixes);
writeMigrationCompleted(eventIndexPrefixes);
} | @Test
void writesMigrationCompletedAfterSuccess() {
mockConfiguredEventPrefixes("events-prefix", "system-events-prefix");
this.sut.upgrade();
final V20200730000000_AddGl2MessageIdFieldAliasForEvents.MigrationCompleted migrationCompleted = captureMigrationCompleted();
assertThat(migrationCompleted.modifiedIndexPrefixes())
.containsExactlyInAnyOrder("events-prefix", "system-events-prefix");
} |
@GET
@Path("/{pluginName}/config")
@Operation(summary = "Get the configuration definition for the specified pluginName")
public List<ConfigKeyInfo> getConnectorConfigDef(final @PathParam("pluginName") String pluginName) {
synchronized (this) {
return herder.connectorPluginConfig(pluginName);
}
} | @Test
public void testGetConnectorConfigDef() {
String connName = ConnectorPluginsResourceTestConnector.class.getName();
when(herder.connectorPluginConfig(eq(connName))).thenAnswer(answer -> {
List<ConfigKeyInfo> results = new ArrayList<>();
for (ConfigDef.ConfigKey configKey : ConnectorPluginsResourceTestConnector.CONFIG_DEF.configKeys().values()) {
results.add(AbstractHerder.convertConfigKey(configKey));
}
return results;
});
List<ConfigKeyInfo> connectorConfigDef = connectorPluginsResource.getConnectorConfigDef(connName);
assertEquals(ConnectorPluginsResourceTestConnector.CONFIG_DEF.names().size(), connectorConfigDef.size());
for (String config : ConnectorPluginsResourceTestConnector.CONFIG_DEF.names()) {
Optional<ConfigKeyInfo> cki = connectorConfigDef.stream().filter(c -> c.name().equals(config)).findFirst();
assertTrue(cki.isPresent());
}
} |
public MetadataBlockType getType() {
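// A block without exceptions marks end-of-stream; one carrying exceptions is an error block.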
return _errCodeToExceptionMap.isEmpty() ? MetadataBlockType.EOS : MetadataBlockType.ERROR;
} | @Test
public void v0EosWithExceptionsIsDecodedAsV2ErrorWithSameExceptions()
throws IOException {
// Given:
// MetadataBlock used to be encoded without any data; we should make sure
// that during rollout, or if server versions are mismatched, we can still
// handle the old format
V0MetadataBlock legacyBlock = new V0MetadataBlock();
legacyBlock.addException(250, "timeout");
byte[] bytes = legacyBlock.toBytes();
// When:
ByteBuffer buff = ByteBuffer.wrap(bytes);
DataBlockUtils.readVersionType(buff); // consume the version information before decoding
MetadataBlock metadataBlock = new MetadataBlock(buff);
// Then:
assertEquals(metadataBlock.getType(), MetadataBlock.MetadataBlockType.ERROR);
assertEquals(metadataBlock.getExceptions(), legacyBlock.getExceptions(), "Expected exceptions");
} |
@Override
public Flux<ExtensionStore> listByNamePrefix(String prefix) {
return repository.findAllByNameStartingWith(prefix);
} | @Test
void listByNamePrefix() {
var expectedExtensions = List.of(
new ExtensionStore("/registry/posts/hello-world", "this is post".getBytes(), 1L),
new ExtensionStore("/registry/posts/hello-halo", "this is post".getBytes(), 1L)
);
when(repository.findAllByNameStartingWith("/registry/posts"))
.thenReturn(Flux.fromIterable(expectedExtensions));
var gotExtensions = client.listByNamePrefix("/registry/posts").collectList().block();
assertEquals(expectedExtensions, gotExtensions);
} |
public static Object fromJson(String json, Type originType) throws RuntimeException {
if (!isSupportGson()) {
throw new RuntimeException("Gson is not supported. Please import Gson in JVM env.");
}
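// Canonicalize the generic type through Gson's TypeToken before deserializing.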
Type type = TypeToken.get(originType).getType();
try {
return getGson().fromJson(json, type);
} catch (JsonSyntaxException ex) {
throw new RuntimeException(String.format(
"Generic serialization [%s] Json syntax exception thrown when parsing (message:%s type:%s) error:%s",
GENERIC_SERIALIZATION_GSON, json, type.toString(), ex.getMessage()));
}
} | @Test
void test1() {
Object user = GsonUtils.fromJson("{'name':'Tom','age':24}", User.class);
Assertions.assertInstanceOf(User.class, user);
Assertions.assertEquals("Tom", ((User) user).getName());
Assertions.assertEquals(24, ((User) user).getAge());
try {
GsonUtils.fromJson("{'name':'Tom','age':}", User.class);
Assertions.fail();
} catch (RuntimeException ex) {
Assertions.assertEquals(
"Generic serialization [gson] Json syntax exception thrown when parsing (message:{'name':'Tom','age':} type:class org.apache.dubbo.common.json.GsonUtilsTest$User) error:com.google.gson.stream.MalformedJsonException: Expected value at line 1 column 21 path $.age\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json",
ex.getMessage());
}
} |
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
if (boolean.class == type) {
return resultSet.getBoolean(columnIndex);
}
if (byte.class == type) {
return resultSet.getByte(columnIndex);
}
if (short.class == type) {
return resultSet.getShort(columnIndex);
}
if (int.class == type) {
return resultSet.getInt(columnIndex);
}
if (long.class == type) {
return resultSet.getLong(columnIndex);
}
if (float.class == type) {
return resultSet.getFloat(columnIndex);
}
if (double.class == type) {
return resultSet.getDouble(columnIndex);
}
if (String.class == type) {
return resultSet.getString(columnIndex);
}
if (BigDecimal.class == type) {
return resultSet.getBigDecimal(columnIndex);
}
if (byte[].class == type) {
return resultSet.getBytes(columnIndex);
}
if (Date.class == type) {
return resultSet.getDate(columnIndex);
}
if (Time.class == type) {
return resultSet.getTime(columnIndex);
}
if (Timestamp.class == type) {
return resultSet.getTimestamp(columnIndex);
}
if (Blob.class == type) {
return resultSet.getBlob(columnIndex);
}
if (Clob.class == type) {
return resultSet.getClob(columnIndex);
}
if (Array.class == type) {
return resultSet.getArray(columnIndex);
}
return resultSet.getObject(columnIndex);
} | @Test
void assertGetValueByArray() throws SQLException {
ResultSet resultSet = mock(ResultSet.class);
Array value = mock(Array.class);
when(resultSet.getArray(1)).thenReturn(value);
assertThat(new JDBCStreamQueryResult(resultSet).getValue(1, Array.class), is(value));
} |
public RepositoryList searchRepos(String encodedCredentials, String workspace, @Nullable String repoName, Integer page, Integer pageSize) {
String filterQuery = String.format("q=name~\"%s\"", repoName != null ? repoName : "");
HttpUrl url = buildUrl(String.format("/repositories/%s?%s&page=%s&pagelen=%s", workspace, filterQuery, page, pageSize));
return doGetWithBasicAuth(encodedCredentials, url, r -> buildGson().fromJson(r.body().charStream(), RepositoryList.class));
} | @Test
public void get_repos() {
server.enqueue(new MockResponse()
.setHeader("Content-Type", "application/json;charset=UTF-8")
.setBody("{\n" +
" \"values\": [\n" +
" {\n" +
" \"slug\": \"banana\",\n" +
" \"uuid\": \"BANANA-UUID\",\n" +
" \"name\": \"banana\",\n" +
" \"project\": {\n" +
" \"key\": \"HOY\",\n" +
" \"uuid\": \"BANANA-PROJECT-UUID\",\n" +
" \"name\": \"hoy\"\n" +
" }\n" +
" },\n" +
" {\n" +
" \"slug\": \"potato\",\n" +
" \"uuid\": \"POTATO-UUID\",\n" +
" \"name\": \"potato\",\n" +
" \"project\": {\n" +
" \"key\": \"HEY\",\n" +
" \"uuid\": \"POTATO-PROJECT-UUID\",\n" +
" \"name\": \"hey\"\n" +
" }\n" +
" }\n" +
" ]\n" +
"}"));
RepositoryList repositoryList = underTest.searchRepos("user:apppwd", "", null, 1, 100);
assertThat(repositoryList.getNext()).isNull();
assertThat(repositoryList.getValues())
.hasSize(2)
.extracting(Repository::getUuid, Repository::getName, Repository::getSlug,
g -> g.getProject().getUuid(), g -> g.getProject().getKey(), g -> g.getProject().getName())
.containsExactlyInAnyOrder(
tuple("BANANA-UUID", "banana", "banana", "BANANA-PROJECT-UUID", "HOY", "hoy"),
tuple("POTATO-UUID", "potato", "potato", "POTATO-PROJECT-UUID", "HEY", "hey"));
} |
@Override
public void execute(Context context) {
List<MeasureComputerWrapper> wrappers = Arrays.stream(measureComputers).map(ToMeasureWrapper.INSTANCE).toList();
validateMetrics(wrappers);
measureComputersHolder.setMeasureComputers(sortComputers(wrappers));
} | @Test
public void return_empty_list_when_no_measure_computers() {
ComputationStep underTest = new LoadMeasureComputersStep(holder, array(new TestMetrics()));
underTest.execute(new TestComputationStepContext());
assertThat(holder.getMeasureComputers()).isEmpty();
} |
public QueryObjectBundle rewriteQuery(@Language("SQL") String query, QueryConfiguration queryConfiguration, ClusterType clusterType)
{
return rewriteQuery(query, queryConfiguration, clusterType, false);
} | @Test
public void testRewriteTimestampWithTimeZone()
{
QueryBundle queryBundle = getQueryRewriter().rewriteQuery("SELECT now(), now() now", CONFIGURATION, CONTROL);
assertCreateTableAs(queryBundle.getQuery(), "SELECT\n" +
" CAST(now() AS varchar)\n" +
", CAST(now() AS varchar) now");
} |
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("s3.listing.chunksize"));
} | @Test
public void testListFilePlusCharacter() throws Exception {
final Path container = new SpectraDirectoryFeature(session, new SpectraWriteFeature(session)).mkdir(
new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
final Path file = new SpectraTouchFeature(session).touch(
new Path(container, String.format("test+%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus());
assertNotNull(new SpectraObjectListService(session).list(container, new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
new SpectraDeleteFeature(session).delete(Collections.singletonList(container), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
public GaussianDistribution(double mu, double sigma) {
this.mu = mu;
this.sigma = sigma;
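// Precompute the variance and the sigma-dependent constants of the entropy and the log-density.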
variance = sigma * sigma;
entropy = Math.log(sigma) + LOG2PIE_2;
pdfConstant = Math.log(sigma) + LOG2PI_2;
} | @Test
public void testGaussianDistribution() {
System.out.println("GaussianDistribution");
MathEx.setSeed(19650218); // to get repeatable results.
GaussianDistribution instance = new GaussianDistribution(3, 2.1);
double[] data = instance.rand(1000);
GaussianDistribution est = GaussianDistribution.fit(data);
assertEquals(3.05, est.mu, 1E-2);
assertEquals(2.13, est.sigma, 1E-2);
} |
@Override
public AuthUser getAuthUser(Integer socialType, Integer userType, String code, String state) {
// Build the request
AuthRequest authRequest = buildAuthRequest(socialType, userType);
AuthCallback authCallback = AuthCallback.builder().code(code).state(state).build();
// Execute the request
AuthResponse<?> authResponse = authRequest.login(authCallback);
log.info("[getAuthUser][请求社交平台 type({}) request({}) response({})]", socialType,
toJsonString(authCallback), toJsonString(authResponse));
if (!authResponse.ok()) {
throw exception(SOCIAL_USER_AUTH_FAILURE, authResponse.getMsg());
}
return (AuthUser) authResponse.getData();
} | @Test
public void testAuthSocialUser_fail() {
// Prepare parameters
Integer socialType = SocialTypeEnum.WECHAT_MP.getType();
Integer userType = randomPojo(UserTypeEnum.class).getValue();
String code = randomString();
String state = randomString();
// Mock method (AuthRequest)
AuthRequest authRequest = mock(AuthRequest.class);
when(authRequestFactory.get(eq("WECHAT_MP"))).thenReturn(authRequest);
// Mock method (AuthResponse)
AuthResponse<?> authResponse = new AuthResponse<>(0, "模拟失败", null);
when(authRequest.login(argThat(authCallback -> {
assertEquals(code, authCallback.getCode());
assertEquals(state, authCallback.getState());
return true;
}))).thenReturn(authResponse);
// Call and assert
assertServiceException(
() -> socialClientService.getAuthUser(socialType, userType, code, state),
SOCIAL_USER_AUTH_FAILURE, "mock failure");
} |
protected int sendSms(String numberTo, String message) throws ThingsboardException {
if (this.smsSender == null) {
throw new ThingsboardException("Unable to send SMS: no SMS provider configured!", ThingsboardErrorCode.GENERAL);
}
return this.sendSms(this.smsSender, numberTo, message);
} | @Test
public void testLimitSmsMessagingByTenantProfileSettings() throws Exception {
tenantProfile = getDefaultTenantProfile();
DefaultTenantProfileConfiguration config = createTenantProfileConfigurationWithSmsLimits(10, true);
saveTenantProfileWitConfiguration(tenantProfile, config);
for (int i = 0; i < 10; i++) {
doReturn(1).when(defaultSmsService).sendSms(any(), any());
defaultSmsService.sendSms(tenantId, null, new String[]{RandomStringUtils.randomNumeric(10)}, "Message");
}
// wait 1 sec so that API usage state is updated
TimeUnit.SECONDS.sleep(1);
assertThrows(RuntimeException.class, () -> {
defaultSmsService.sendSms(tenantId, null, new String[]{RandomStringUtils.randomNumeric(10)}, "Message");
}, "SMS sending is disabled due to API limits!");
} |
public SSLContext createContext(ContextAware context) throws NoSuchProviderException, NoSuchAlgorithmException,
KeyManagementException, UnrecoverableKeyException, KeyStoreException, CertificateException {
SSLContext sslContext = getProvider() != null ? SSLContext.getInstance(getProtocol(), getProvider())
: SSLContext.getInstance(getProtocol());
context.addInfo("SSL protocol '" + sslContext.getProtocol() + "' provider '" + sslContext.getProvider() + "'");
KeyManager[] keyManagers = createKeyManagers(context);
TrustManager[] trustManagers = createTrustManagers(context);
SecureRandom secureRandom = createSecureRandom(context);
sslContext.init(keyManagers, trustManagers, secureRandom);
return sslContext;
} | @Test
public void testCreateDefaultContext() throws Exception {
// should be able to create a context with no configuration at all
assertNotNull(factoryBean.createContext(context));
assertTrue(context.hasInfoMatching(SSL_CONFIGURATION_MESSAGE_PATTERN));
} |
@Override
@Nonnull
public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks) {
throwRejectedExecutionExceptionIfShutdown();
ArrayList<Future<T>> result = new ArrayList<>();
for (Callable<T> task : tasks) {
try {
result.add(new CompletedFuture<>(task.call(), null));
} catch (Exception e) {
result.add(new CompletedFuture<>(null, e));
}
}
return result;
} | @Test
void testRejectedInvokeAllWithEmptyListAndTimeout() {
testRejectedExecutionException(
testInstance -> testInstance.invokeAll(Collections.emptyList(), 1L, TimeUnit.DAYS));
} |
@Override
public final boolean isWrapperFor(final Class<?> iface) {
return iface.isInstance(this);
} | @Test
void assertIsWrapperFor() {
assertTrue(wrapperAdapter.isWrapperFor(Object.class));
} |
@Override
public void linkNodes(K parentKey, K childKey, BiConsumer<TreeEntry<K, V>, TreeEntry<K, V>> consumer) {
consumer = ObjectUtil.defaultIfNull(consumer, (parent, child) -> {
});
final TreeEntryNode<K, V> parentNode = nodes.computeIfAbsent(parentKey, t -> new TreeEntryNode<>(null, t));
TreeEntryNode<K, V> childNode = nodes.get(childKey);
        // 1. The child node does not exist yet
if (ObjectUtil.isNull(childNode)) {
childNode = new TreeEntryNode<>(parentNode, childKey);
consumer.accept(parentNode, childNode);
nodes.put(childKey, childNode);
return;
}
        // 2. The child node exists and is already a child of this parent node
if (ObjectUtil.equals(parentNode, childNode.getDeclaredParent())) {
consumer.accept(parentNode, childNode);
return;
}
        // 3. The child node exists but is not yet linked to any parent
if (false == childNode.hasParent()) {
parentNode.addChild(childNode);
}
        // 4. The child node exists and already has another parent, but the child is allowed to switch parents
else if (allowOverrideParent) {
childNode.getDeclaredParent().removeDeclaredChild(childNode.getKey());
parentNode.addChild(childNode);
}
        // 5. The child node exists and already has another parent, and switching parents is not allowed
else {
throw new IllegalArgumentException(StrUtil.format(
"[{}] has been used as child of [{}], can not be overwrite as child of [{}]",
childNode.getKey(), childNode.getDeclaredParent().getKey(), parentKey
));
}
consumer.accept(parentNode, childNode);
} | @Test
public void getTreeNodesTest() {
final ForestMap<String, String> map = new LinkedForestMap<>(false);
map.linkNodes("a", "b");
map.linkNodes("b", "c");
final List<String> expected = CollUtil.newArrayList("a", "b", "c");
List<String> actual = CollStreamUtil.toList(map.getTreeNodes("a"), TreeEntry::getKey);
assertEquals(expected, actual);
actual = CollStreamUtil.toList(map.getTreeNodes("b"), TreeEntry::getKey);
assertEquals(expected, actual);
actual = CollStreamUtil.toList(map.getTreeNodes("c"), TreeEntry::getKey);
assertEquals(expected, actual);
} |
public static void main(String[] args) throws Exception {
TikaCLI cli = new TikaCLI();
if (cli.testForHelp(args)) {
cli.usage();
return;
} else if (cli.testForBatch(args)) {
String[] batchArgs = BatchCommandLineBuilder.build(args);
BatchProcessDriverCLI batchDriver = new BatchProcessDriverCLI(batchArgs);
batchDriver.execute();
return;
} else if (cli.testForAsync(args)) {
async(args);
return;
}
if (args.length > 0) {
for (String arg : args) {
cli.process(arg);
}
if (cli.pipeMode) {
cli.process("-");
}
} else {
// Started with no arguments. Wait for up to 0.1s to see if
// we have something waiting in standard input and use the
// pipe mode if we have. If no input is seen, start the GUI.
if (System.in.available() == 0) {
Thread.sleep(100);
}
if (System.in.available() > 0) {
cli.process("-");
} else {
cli.process("--gui");
}
}
} | @Test
public void testContentAllOutput() throws Exception {
String[] params = {"-A", resourcePrefix + "testJsonMultipleInts.html"};
TikaCLI.main(params);
String out = outContent.toString(UTF_8.name());
assertTrue(out.contains("this is a title"));
assertTrue(out.contains("body"));
} |
@Override
public Mono<Void> execute(final ServerWebExchange exchange, final ShenyuPluginChain chain) {
String domain = exchange.getAttribute(Constants.HTTP_DOMAIN);
if (StringUtils.isBlank(domain)) {
return chain.execute(exchange);
}
final URI uri = RequestUrlUtils.buildRequestUri(exchange, domain);
exchange.getAttributes().put(Constants.HTTP_URI, uri);
return chain.execute(exchange);
} | @Test
public void testDoExecute() {
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090?queryParam=Hello,World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
// test https
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("https://localhost");
when(shenyuContext.getRealUrl()).thenReturn("/test");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("https://localhost/test?queryParam=Hello,World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
// test realUrl
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost");
when(shenyuContext.getRealUrl()).thenReturn("/test");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost/test?queryParam=Hello,World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
// test rewrite
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090");
exchange.getAttributes().put(Constants.REWRITE_URI, "/rewrite");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090/rewrite?queryParam=Hello,World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
        // test contains % in the raw query
request = MockServerHttpRequest
.get("localhost")
.remoteAddress(new InetSocketAddress(8090))
.queryParam("queryParam", "Hello, World")
.build();
this.exchange = spy(MockServerWebExchange.from(request));
shenyuContext = mock(ShenyuContext.class);
exchange.getAttributes().put(Constants.CONTEXT, shenyuContext);
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090/query");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090/query?queryParam=Hello%2C%20World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
request = MockServerHttpRequest
.get("localhost")
.remoteAddress(new InetSocketAddress(8090))
.queryParam("queryParam", "Hello, World")
.build();
this.exchange = spy(MockServerWebExchange.from(request));
shenyuContext = mock(ShenyuContext.class);
exchange.getAttributes().put(Constants.CONTEXT, shenyuContext);
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090/%20/query");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090/%20/query?queryParam=Hello%2C%20World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
request = MockServerHttpRequest
.get("localhost")
.remoteAddress(new InetSocketAddress(8090))
.queryParam("queryParam", "Hello, World")
.build();
this.exchange = spy(MockServerWebExchange.from(request));
shenyuContext = mock(ShenyuContext.class);
exchange.getAttributes().put(Constants.CONTEXT, shenyuContext);
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090/%20/query");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090/%20/query?queryParam=Hello%2C%20%20World", exchange.getAttributes().get(Constants.HTTP_URI).toString());
request = MockServerHttpRequest
.get("localhost")
.remoteAddress(new InetSocketAddress(8090))
.queryParam("p", "a%2Bb=")
.build();
this.exchange = spy(MockServerWebExchange.from(request));
shenyuContext = mock(ShenyuContext.class);
exchange.getAttributes().put(Constants.CONTEXT, shenyuContext);
when(exchange.getAttribute(Constants.HTTP_DOMAIN)).thenReturn("http://localhost:8090/query");
when(chain.execute(exchange)).thenReturn(Mono.empty());
StepVerifier.create(uriPlugin.execute(exchange, chain)).expectSubscription().verifyComplete();
assertEquals("http://localhost:8090/query?p=a%252Bb%3D", exchange.getAttributes().get(Constants.HTTP_URI).toString());
} |
public boolean isSuppressed(Device device) {
if (suppressedDeviceType.contains(device.type())) {
return true;
}
final Annotations annotations = device.annotations();
if (containsSuppressionAnnotation(annotations)) {
return true;
}
return false;
} | @Test
public void testNotSuppressedDevice() {
Device device = new DefaultDevice(PID,
NON_SUPPRESSED_DID,
Device.Type.SWITCH,
MFR, HW, SW1, SN, CID);
assertFalse(rules.isSuppressed(device));
} |
public static List<BindAddress> validateBindAddresses(ServiceConfiguration config, Collection<String> schemes) {
// migrate the existing configuration properties
List<BindAddress> addresses = migrateBindAddresses(config);
// parse the list of additional bind addresses
Arrays
.stream(StringUtils.split(StringUtils.defaultString(config.getBindAddresses()), ","))
.map(s -> {
Matcher m = BIND_ADDRESSES_PATTERN.matcher(s);
if (!m.matches()) {
throw new IllegalArgumentException("bindAddresses: malformed: " + s);
}
return m;
})
.map(m -> new BindAddress(m.group("name"), URI.create(m.group("url"))))
.forEach(addresses::add);
// apply the filter
if (schemes != null) {
addresses.removeIf(a -> !schemes.contains(a.getAddress().getScheme()));
}
return addresses;
} | @Test(expectedExceptions = IllegalArgumentException.class)
public void testMalformed() {
ServiceConfiguration config = newEmptyConfiguration();
config.setBindAddresses("internal:");
List<BindAddress> addresses = BindAddressValidator.validateBindAddresses(config, null);
assertEquals(0, addresses.size());
} |
Optional<TimeRange> parse(final String shortTimerange) {
if (shortTimerange != null && SHORT_FORMAT_PATTERN.matcher(shortTimerange).matches()) {
final String numberPart = shortTimerange.substring(0, shortTimerange.length() - 1);
final String periodPart = shortTimerange.substring(shortTimerange.length() - 1);
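            // Map the single-letter unit to its long form, e.g. "d" -> "days".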
String longPeriodPart = SHORT_TO_LONG_PERIOD_MAPPING.get(periodPart);
if (longPeriodPart != null) {
if ("1".equals(numberPart)) {
longPeriodPart = longPeriodPart.substring(0, longPeriodPart.length() - 1); //removing last "s"
}
return Optional.of(
KeywordRange.create(
"last " + numberPart + " " + longPeriodPart,
"UTC")
);
}
}
return Optional.empty();
} | @Test
void returnsEmptyOptionalOnBadInput() {
assertThat(toTest.parse("#$%")).isEmpty();
assertThat(toTest.parse("13days")).isEmpty();
assertThat(toTest.parse("42x")).isEmpty();
assertThat(toTest.parse("-13days")).isEmpty();
assertThat(toTest.parse("1,5d")).isEmpty();
assertThat(toTest.parse("d13d")).isEmpty();
assertThat(toTest.parse("")).isEmpty();
} |
public void scheduleUpdateIfAbsent(String serviceName, String groupName, String clusters) {
if (!asyncQuerySubscribeService) {
return;
}
String serviceKey = ServiceInfo.getKey(NamingUtils.getGroupedName(serviceName, groupName), clusters);
if (futureMap.get(serviceKey) != null) {
return;
}
synchronized (futureMap) {
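            // Double-checked locking: re-check under the lock so only one update task is scheduled per service key.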
if (futureMap.get(serviceKey) != null) {
return;
}
ScheduledFuture<?> future = addTask(new UpdateTask(serviceName, groupName, clusters));
futureMap.put(serviceKey, future);
}
} | @Test
void testScheduleUpdateIfAbsentWithOtherException()
throws InterruptedException, NacosException, NoSuchFieldException, IllegalAccessException {
nacosClientProperties.setProperty(PropertyKeyConst.NAMING_ASYNC_QUERY_SUBSCRIBE_SERVICE, "true");
serviceInfoUpdateService = new ServiceInfoUpdateService(nacosClientProperties, holder, proxy, notifier);
serviceInfoUpdateService.scheduleUpdateIfAbsent(serviceName, group, clusters);
when(proxy.queryInstancesOfService(serviceName, group, clusters, false)).thenThrow(
new RuntimeException("test"));
TimeUnit.MILLISECONDS.sleep(1500);
assertTrue(getScheduleFuture().getDelay(TimeUnit.MILLISECONDS) > 1000);
} |
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);
for (String feature : FEATURES_IN_ORDER)
{
if (!gsubData.isFeatureSupported(feature))
{
if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE))
{
// Create your own rkrf feature from vatu feature
intermediateGlyphsFromGsub = applyRKRFFeature(
gsubData.getFeature(VATU_FEATURE),
intermediateGlyphsFromGsub);
}
LOG.debug("the feature {} was not found", feature);
continue;
}
LOG.debug("applying the feature {}", feature);
ScriptFeature scriptFeature = gsubData.getFeature(feature);
intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature,
intermediateGlyphsFromGsub);
}
return Collections.unmodifiableList(intermediateGlyphsFromGsub);
} | @Test
void testApplyTransforms_locl()
{
// given
List<Integer> glyphsAfterGsub = Arrays.asList(642);
// when
List<Integer> result = gsubWorkerForDevanagari.applyTransforms(getGlyphIds("प्त"));
System.out.println("result: " + result);
// then
assertEquals(glyphsAfterGsub, result);
} |
@PublicAPI(usage = ACCESS)
public static Transformer matching(String packageIdentifier) {
PackageMatchingSliceIdentifier sliceIdentifier = new PackageMatchingSliceIdentifier(packageIdentifier);
String description = "slices matching " + sliceIdentifier.getDescription();
return new Transformer(sliceIdentifier, description);
} | @Test
public void matches_slices() {
JavaClasses classes = importClassesWithContext(Object.class, String.class, List.class, Set.class, Pattern.class);
assertThat(Slices.matching("java.(*)..").transform(classes)).hasSize(2);
assertThat(Slices.matching("(**)").transform(classes)).hasSize(3);
assertThat(Slices.matching("java.(**)").transform(classes)).hasSize(3);
assertThat(Slices.matching("java.(*).(*)").transform(classes)).hasSize(1);
} |
@Override
public Mono<String> generateBundleVersion() {
if (pluginManager.isDevelopment()) {
return Mono.just(String.valueOf(clock.instant().toEpochMilli()));
}
return Flux.fromIterable(new ArrayList<>(pluginManager.getStartedPlugins()))
.sort(Comparator.comparing(PluginWrapper::getPluginId))
.map(pw -> pw.getPluginId() + ':' + pw.getDescriptor().getVersion())
.collect(Collectors.joining())
.map(Hashing.sha256()::hashUnencodedChars)
.map(HashCode::toString);
} | @Test
void generateBundleVersionTest() {
var plugin1 = mock(PluginWrapper.class);
var plugin2 = mock(PluginWrapper.class);
var plugin3 = mock(PluginWrapper.class);
when(pluginManager.getStartedPlugins()).thenReturn(List.of(plugin1, plugin2, plugin3));
var descriptor1 = mock(PluginDescriptor.class);
var descriptor2 = mock(PluginDescriptor.class);
var descriptor3 = mock(PluginDescriptor.class);
when(plugin1.getDescriptor()).thenReturn(descriptor1);
when(plugin2.getDescriptor()).thenReturn(descriptor2);
when(plugin3.getDescriptor()).thenReturn(descriptor3);
when(plugin1.getPluginId()).thenReturn("fake-1");
when(plugin2.getPluginId()).thenReturn("fake-2");
when(plugin3.getPluginId()).thenReturn("fake-3");
when(descriptor1.getVersion()).thenReturn("1.0.0");
when(descriptor2.getVersion()).thenReturn("2.0.0");
when(descriptor3.getVersion()).thenReturn("3.0.0");
var str = "fake-1:1.0.0fake-2:2.0.0fake-3:3.0.0";
var result = Hashing.sha256().hashUnencodedChars(str).toString();
assertThat(result.length()).isEqualTo(64);
pluginService.generateBundleVersion()
.as(StepVerifier::create)
.consumeNextWith(version -> assertThat(version).isEqualTo(result))
.verifyComplete();
var plugin4 = mock(PluginWrapper.class);
var descriptor4 = mock(PluginDescriptor.class);
when(plugin4.getDescriptor()).thenReturn(descriptor4);
when(plugin4.getPluginId()).thenReturn("fake-4");
when(descriptor4.getVersion()).thenReturn("3.0.0");
var str2 = "fake-1:1.0.0fake-2:2.0.0fake-4:3.0.0";
var result2 = Hashing.sha256().hashUnencodedChars(str2).toString();
when(pluginManager.getStartedPlugins()).thenReturn(List.of(plugin1, plugin2, plugin4));
pluginService.generateBundleVersion()
.as(StepVerifier::create)
.consumeNextWith(version -> assertThat(version).isEqualTo(result2))
.verifyComplete();
assertThat(result).isNotEqualTo(result2);
} |
@Nullable
public static <T> T getWithoutException(CompletableFuture<T> future) {
if (isCompletedNormally(future)) {
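            // A normally completed future returns from get() immediately, so the catch below is effectively unreachable.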
try {
return future.get();
} catch (InterruptedException | ExecutionException ignored) {
}
}
return null;
} | @Test
void testGetWithoutException() {
final int expectedValue = 1;
final CompletableFuture<Integer> completableFuture = new CompletableFuture<>();
completableFuture.complete(expectedValue);
assertThat(FutureUtils.getWithoutException(completableFuture)).isEqualTo(expectedValue);
} |
public static void validate(
FederationPolicyInitializationContext policyContext, String myType)
throws FederationPolicyInitializationException {
if (myType == null) {
throw new FederationPolicyInitializationException(
"The myType parameter" + " should not be null.");
}
if (policyContext == null) {
throw new FederationPolicyInitializationException(
"The FederationPolicyInitializationContext provided is null. Cannot"
+ " reinitialize " + "successfully.");
}
if (policyContext.getFederationStateStoreFacade() == null) {
throw new FederationPolicyInitializationException(
"The FederationStateStoreFacade provided is null. Cannot"
+ " reinitialize successfully.");
}
if (policyContext.getFederationSubclusterResolver() == null) {
throw new FederationPolicyInitializationException(
"The FederationSubclusterResolver provided is null. Cannot"
+ " reinitialize successfully.");
}
if (policyContext.getSubClusterPolicyConfiguration() == null) {
throw new FederationPolicyInitializationException(
"The SubClusterPolicyConfiguration provided is null. Cannot "
+ "reinitialize successfully.");
}
String intendedType =
policyContext.getSubClusterPolicyConfiguration().getType();
if (!myType.equals(intendedType)) {
throw new FederationPolicyInitializationException(
"The FederationPolicyConfiguration carries a type (" + intendedType
+ ") different then mine (" + myType
+ "). Cannot reinitialize successfully.");
}
} | @Test(expected = FederationPolicyInitializationException.class)
public void nullContext() throws Exception {
FederationPolicyInitializationContextValidator.validate(null,
MockPolicyManager.class.getCanonicalName());
} |
public static IRubyObject deep(final Ruby runtime, final Object input) {
if (input == null) {
return runtime.getNil();
}
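        // Fast path: use the converter registered for the value's concrete class, if present.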
final Class<?> cls = input.getClass();
final Rubyfier.Converter converter = CONVERTER_MAP.get(cls);
if (converter != null) {
return converter.convert(runtime, input);
}
return fallbackConvert(runtime, input, cls);
} | @Test
public void testDeepListWithInteger() throws Exception {
List<Integer> data = new ArrayList<>();
data.add(1);
@SuppressWarnings("rawtypes")
RubyArray rubyArray = (RubyArray)Rubyfier.deep(RubyUtil.RUBY, data);
        // toJavaArray does not convert inner elements to Java types \o/
assertEquals(RubyFixnum.class, rubyArray.toJavaArray()[0].getClass());
assertEquals(1L, ((RubyFixnum)rubyArray.toJavaArray()[0]).getLongValue());
} |
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) {
return signatureCache.computeIfAbsent(fn, DoFnSignatures::parseSignature);
} | @Test
public void testStateIdDuplicate() throws Exception {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Duplicate");
thrown.expectMessage("StateId");
thrown.expectMessage("my-id");
thrown.expectMessage("myfield1");
thrown.expectMessage("myfield2");
thrown.expectMessage(not(mentionsTimers()));
DoFnSignatures.getSignature(
new DoFn<KV<String, Integer>, Long>() {
@StateId("my-id")
private final StateSpec<ValueState<Integer>> myfield1 =
StateSpecs.value(VarIntCoder.of());
@StateId("my-id")
private final StateSpec<ValueState<Long>> myfield2 = StateSpecs.value(VarLongCoder.of());
@ProcessElement
public void foo(ProcessContext context) {}
}.getClass());
} |
public void addVariable(String variable) throws YarnException {
if (immutableVariables.contains(variable)) {
throw new YarnException("Variable '" + variable + "' is immutable " +
"cannot add to the modified variable list.");
}
knownVariables.add(variable);
} | @Test
public void testDynamicQueueValidation() {
//Setting up queue manager and emulated queue hierarchy
CapacitySchedulerQueueManager qm =
mock(CapacitySchedulerQueueManager.class);
MockQueueHierarchyBuilder.create()
.withQueueManager(qm)
.withQueue("root.unmanaged")
.withDynamicParentQueue("root.managed")
.withQueue("root.unmanagedwithchild.child")
.withQueue("root.leaf")
.build();
when(qm.getQueue(isNull())).thenReturn(null);
MappingRuleValidationContextImpl ctx =
new MappingRuleValidationContextImpl(qm);
try {
ctx.addVariable("%dynamic");
ctx.addVariable("%user");
} catch (YarnException e) {
fail("We don't expect the add variable to fail: " + e.getMessage());
}
assertValidPath(ctx, "%dynamic");
assertValidPath(ctx, "root.%dynamic");
assertValidPath(ctx, "%user.%dynamic");
assertValidPath(ctx, "root.managed.%dynamic");
assertValidPath(ctx, "managed.%dynamic");
assertValidPath(ctx, "managed.static");
assertValidPath(ctx, "managed.static.%dynamic");
assertValidPath(ctx, "managed.static.%dynamic.%dynamic");
assertInvalidPath(ctx, "root.invalid.%dynamic");
assertInvalidPath(ctx, "root.unmanaged.%dynamic");
assertValidPath(ctx, "root.unmanagedwithchild.%user");
assertValidPath(ctx, "unmanagedwithchild.%user");
} |
@Converter(fallback = true)
public static <T> T convertTo(Class<T> type, Exchange exchange, Object value, TypeConverterRegistry registry) {
if (NodeInfo.class.isAssignableFrom(value.getClass())) {
// use a fallback type converter so we can convert the embedded body if the value is NodeInfo
NodeInfo ni = (NodeInfo) value;
// first try to find a Converter for Node
TypeConverter tc = registry.lookup(type, Node.class);
if (tc != null) {
Node node = NodeOverNodeInfo.wrap(ni);
return tc.convertTo(type, exchange, node);
}
// if this does not exist we can also try NodeList (there are some type converters for that) as
// the default Xerces Node implementation also implements NodeList.
tc = registry.lookup(type, NodeList.class);
if (tc != null) {
List<NodeInfo> nil = new LinkedList<>();
nil.add(ni);
return tc.convertTo(type, exchange, toDOMNodeList(nil));
}
} else if (List.class.isAssignableFrom(value.getClass())) {
TypeConverter tc = registry.lookup(type, NodeList.class);
if (tc != null) {
List<NodeInfo> lion = new LinkedList<>();
for (Object o : (List<?>) value) {
if (o instanceof NodeInfo) {
lion.add((NodeInfo) o);
}
}
if (!lion.isEmpty()) {
NodeList nl = toDOMNodeList(lion);
return tc.convertTo(type, exchange, nl);
}
}
} else if (NodeOverNodeInfo.class.isAssignableFrom(value.getClass())) {
// NodeOverNode info is a read-only Node implementation from Saxon. In contrast to the JDK
// com.sun.org.apache.xerces.internal.dom.NodeImpl class it does not implement NodeList, but
// many Camel type converters are based on that interface. Therefore we convert to NodeList and
// try type conversion in the fallback type converter.
TypeConverter tc = registry.lookup(type, NodeList.class);
if (tc != null) {
List<Node> domNodeList = new LinkedList<>();
domNodeList.add((NodeOverNodeInfo) value);
return tc.convertTo(type, exchange, new DOMNodeList(domNodeList));
}
}
return null;
} | @Test
public void convertToNode() {
Node node = context.getTypeConverter().convertTo(Node.class, exchange, doc);
assertNotNull(node);
String string = context.getTypeConverter().convertTo(String.class, exchange, node);
assertEquals(CONTENT, string);
} |
@Override
public byte[] getFileContent(Long configId, String path) throws Exception {
FileClient client = fileConfigService.getFileClient(configId);
        Assert.notNull(client, "File client ({}) must not be null", configId);
return client.getContent(path);
} | @Test
public void testGetFileContent() throws Exception {
        // Prepare parameters
Long configId = 10L;
String path = "tudou.jpg";
        // Mock behavior
FileClient client = mock(FileClient.class);
when(fileConfigService.getFileClient(eq(10L))).thenReturn(client);
byte[] content = new byte[]{};
when(client.getContent(eq("tudou.jpg"))).thenReturn(content);
        // Invoke
byte[] result = fileService.getFileContent(configId, path);
        // Assert
assertSame(result, content);
} |
public static TableIdentifier fromJson(String json) {
Preconditions.checkArgument(
json != null, "Cannot parse table identifier from invalid JSON: null");
Preconditions.checkArgument(
!json.isEmpty(), "Cannot parse table identifier from invalid JSON: ''");
return JsonUtil.parse(json, TableIdentifierParser::fromJson);
} | @Test
public void testTableIdentifierFromJson() {
String json = "{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"}";
TableIdentifier identifier = TableIdentifier.of(Namespace.of("accounting", "tax"), "paid");
assertThat(TableIdentifierParser.fromJson(json))
.as("Should be able to deserialize a valid table identifier")
.isEqualTo(identifier);
TableIdentifier identifierWithEmptyNamespace = TableIdentifier.of(Namespace.empty(), "paid");
String jsonWithEmptyNamespace = "{\"namespace\":[],\"name\":\"paid\"}";
assertThat(TableIdentifierParser.fromJson(jsonWithEmptyNamespace))
.as("Should be able to deserialize a valid multi-level table identifier")
.isEqualTo(identifierWithEmptyNamespace);
String identifierMissingNamespace = "{\"name\":\"paid\"}";
assertThat(TableIdentifierParser.fromJson(identifierMissingNamespace))
.as(
"Should implicitly convert a missing namespace into the the empty namespace when parsing")
.isEqualTo(identifierWithEmptyNamespace);
} |
@Override
public Result invoke(final Invocation invocation) throws RpcException {
checkWhetherDestroyed();
// binding attachments into invocation.
// Map<String, Object> contextAttachments = RpcContext.getClientAttachment().getObjectAttachments();
// if (contextAttachments != null && contextAttachments.size() != 0) {
// ((RpcInvocation) invocation).addObjectAttachmentsIfAbsent(contextAttachments);
// }
InvocationProfilerUtils.enterDetailProfiler(invocation, () -> "Router route.");
List<Invoker<T>> invokers = list(invocation);
InvocationProfilerUtils.releaseDetailProfiler(invocation);
checkInvokers(invokers, invocation);
LoadBalance loadbalance = initLoadBalance(invokers, invocation);
RpcUtils.attachInvocationIdIfAsync(getUrl(), invocation);
InvocationProfilerUtils.enterDetailProfiler(
invocation, () -> "Cluster " + this.getClass().getName() + " invoke.");
try {
return doInvoke(invocation, invokers, loadbalance);
} finally {
InvocationProfilerUtils.releaseDetailProfiler(invocation);
}
} | @Disabled(
"RpcContext attachments will be set to Invocation twice, first in ConsumerContextFilter, second AbstractInvoker")
@Test
void testBindingAttachment() {
final String attachKey = "attach";
final String attachValue = "value";
// setup attachment
RpcContext.getClientAttachment().setAttachment(attachKey, attachValue);
Map<String, Object> attachments = RpcContext.getClientAttachment().getObjectAttachments();
Assertions.assertTrue(attachments != null && attachments.size() == 1, "set attachment failed!");
cluster = new AbstractClusterInvoker(dic) {
@Override
protected Result doInvoke(Invocation invocation, List invokers, LoadBalance loadbalance)
throws RpcException {
// attachment will be bind to invocation
String value = invocation.getAttachment(attachKey);
Assertions.assertNotNull(value);
Assertions.assertEquals(attachValue, value, "binding attachment failed!");
return null;
}
};
// invoke
cluster.invoke(invocation);
} |
public static List<String> loadAndModifyConfiguration(String[] args) throws FlinkException {
return ConfigurationParserUtils.loadAndModifyConfiguration(
filterCmdArgs(args, ModifiableClusterConfigurationParserFactory.options()),
BashJavaUtils.class.getSimpleName());
} | @TestTemplate
void testloadAndModifyConfigurationRemoveKeysNotMatched() throws Exception {
String key = "key";
String value = "value";
String removeKey = "removeKey";
String[] args = {
"--configDir",
confDir.toFile().getAbsolutePath(),
String.format("-D%s=%s", key, value),
"--removeKey",
removeKey
};
List<String> list = FlinkConfigLoader.loadAndModifyConfiguration(args);
if (standardYaml) {
assertThat(list)
.containsExactly("test:", " key: " + TEST_CONFIG_VALUE, key + ": " + value);
} else {
assertThat(list)
.containsExactlyInAnyOrder(
TEST_CONFIG_KEY + ": " + TEST_CONFIG_VALUE, key + ": " + value);
}
} |
@Override
public boolean hasSetCookieWithName(String cookieName) {
for (String setCookieValue : getHeaders().getAll(HttpHeaderNames.SET_COOKIE)) {
Cookie cookie = ClientCookieDecoder.STRICT.decode(setCookieValue);
if (cookie.name().equalsIgnoreCase(cookieName)) {
return true;
}
}
return false;
} | @Test
void testHasSetCookieWithName() {
response.getHeaders()
.add(
"Set-Cookie",
"c1=1234; Max-Age=-1; Expires=Tue, 01 Sep 2015 22:49:57 GMT; Path=/; Domain=.netflix.com");
response.getHeaders()
.add(
"Set-Cookie",
"c2=4567; Max-Age=-1; Expires=Tue, 01 Sep 2015 22:49:57 GMT; Path=/; Domain=.netflix.com");
assertTrue(response.hasSetCookieWithName("c1"));
assertTrue(response.hasSetCookieWithName("c2"));
assertFalse(response.hasSetCookieWithName("XX"));
} |
public Grok cachedGrokForPattern(String pattern) {
return cachedGrokForPattern(pattern, false);
} | @Test
public void cachedGrokForPatternWithNamedCaptureOnly() {
final Grok grok = grokPatternRegistry.cachedGrokForPattern("%{TESTNUM}", true);
assertThat(grok.getPatterns()).containsEntry(GROK_PATTERN.name(), GROK_PATTERN.pattern());
} |
@Override
public Map<ExecutionAttemptID, ExecutionSlotAssignment> allocateSlotsFor(
List<ExecutionAttemptID> executionAttemptIds) {
Map<ExecutionAttemptID, ExecutionSlotAssignment> result = new HashMap<>();
Map<SlotRequestId, ExecutionAttemptID> remainingExecutionsToSlotRequest =
new HashMap<>(executionAttemptIds.size());
List<PhysicalSlotRequest> physicalSlotRequests =
new ArrayList<>(executionAttemptIds.size());
for (ExecutionAttemptID executionAttemptId : executionAttemptIds) {
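            // Reuse the slot assignment already requested for this execution instead of issuing a new physical slot request.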
if (requestedPhysicalSlots.containsKeyA(executionAttemptId)) {
result.put(
executionAttemptId,
new ExecutionSlotAssignment(
executionAttemptId,
requestedPhysicalSlots.getValueByKeyA(executionAttemptId)));
} else {
final SlotRequestId slotRequestId = new SlotRequestId();
final ResourceProfile resourceProfile =
resourceProfileRetriever.apply(executionAttemptId);
Collection<TaskManagerLocation> preferredLocations =
preferredLocationsRetriever.getPreferredLocations(
executionAttemptId.getExecutionVertexId(), Collections.emptySet());
final SlotProfile slotProfile =
SlotProfile.priorAllocation(
resourceProfile,
resourceProfile,
preferredLocations,
Collections.emptyList(),
Collections.emptySet());
final PhysicalSlotRequest request =
new PhysicalSlotRequest(
slotRequestId, slotProfile, slotWillBeOccupiedIndefinitely);
physicalSlotRequests.add(request);
remainingExecutionsToSlotRequest.put(slotRequestId, executionAttemptId);
}
}
result.putAll(
allocatePhysicalSlotsFor(remainingExecutionsToSlotRequest, physicalSlotRequests));
return result;
} | @Test
void testPhysicalSlotReleasesLogicalSlots() throws Exception {
final AllocationContext context = new AllocationContext();
final CompletableFuture<LogicalSlot> slotFuture =
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
final TestingPayload payload = new TestingPayload();
slotFuture.thenAccept(logicalSlot -> logicalSlot.tryAssignPayload(payload));
final SlotRequestId slotRequestId =
context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
final TestingPhysicalSlot physicalSlot =
context.getSlotProvider().getFirstResponseOrFail().get();
assertThat(payload.getTerminalStateFuture()).isNotDone();
assertThat(physicalSlot.getPayload()).isNotNull();
physicalSlot.getPayload().release(new Throwable());
assertThat(payload.getTerminalStateFuture()).isDone();
assertThat(context.getSlotProvider().getCancellations()).containsKey(slotRequestId);
context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
// there should be one more physical slot allocation, as the first allocation should be
// removed after releasing all logical slots
assertThat(context.getSlotProvider().getRequests()).hasSize(2);
} |
public URL getInterNodeListener(
final Function<URL, Integer> portResolver
) {
return getInterNodeListener(portResolver, LOGGER);
} | @Test
public void shouldUseExplicitInterNodeListenerIfSetToLocalHost() {
// Given:
final URL expected = url("https://localHost:52368");
final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder()
.putAll(MIN_VALID_CONFIGS)
.put(ADVERTISED_LISTENER_CONFIG, expected.toString())
.build()
);
// When:
final URL actual = config.getInterNodeListener(portResolver, logger);
// Then:
assertThat(actual, is(expected));
verifyLogsInterNodeListener(expected, QUOTED_INTER_NODE_LISTENER_CONFIG);
verifyLogsLoopBackWarning(expected, QUOTED_INTER_NODE_LISTENER_CONFIG);
verifyNoMoreInteractions(logger);
} |
public static String encodeInUtf8(String url) {
String[] parts = url.split("/");
StringBuilder builder = new StringBuilder();
for (int i = 0; i < parts.length; i++) {
String part = parts[i];
builder.append(URLEncoder.encode(part, StandardCharsets.UTF_8));
if (i < parts.length - 1) {
builder.append('/');
}
}
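        // String.split drops trailing empty strings, so restore a trailing slash if the input ended with one.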
if (url.endsWith("/")) {
builder.append('/');
}
return builder.toString();
} | @Test
public void shouldKeepPrecedingSlash() {
assertThat(UrlUtil.encodeInUtf8("/a%b/c%d"), is("/a%25b/c%25d"));
} |
public Set<Long> findCmdIds(List<Status> statusList) throws JobDoesNotExistException {
Set<Long> set = new HashSet<>();
for (Map.Entry<Long, CmdInfo> x : mInfoMap.entrySet()) {
if (statusList.isEmpty()
|| statusList.contains(getCmdStatus(
x.getValue().getJobControlId()))) {
Long key = x.getKey();
set.add(key);
}
}
return set;
} | @Test
public void testFindCmdIdsForComplete() throws Exception {
long completedId = generateMigrateCommandForStatus(Status.COMPLETED);
mSearchingCriteria.add(Status.COMPLETED);
Set<Long> completedCmdIds = mCmdJobTracker.findCmdIds(mSearchingCriteria);
Assert.assertEquals(completedCmdIds.size(), 1);
Assert.assertTrue(completedCmdIds.contains(completedId));
} |
@ApiOperation(value = "Update a user’s info", tags = { "Users" }, nickname = "updateUserInfo")
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Indicates the user was found and the info has been updated."),
@ApiResponse(code = 400, message = "Indicates the value was missing from the request body."),
@ApiResponse(code = 404, message = "Indicates the requested user was not found or the user does not have info for the given key. Status description contains additional information about the error.")
})
@PutMapping(value = "/identity/users/{userId}/info/{key}", produces = "application/json")
public UserInfoResponse setUserInfo(@ApiParam(name = "userId") @PathVariable("userId") String userId, @ApiParam(name = "key") @PathVariable("key") String key, @RequestBody UserInfoRequest userRequest) {
User user = getUserFromRequest(userId);
String validKey = getValidKeyFromRequest(user, key);
if (userRequest.getValue() == null) {
throw new FlowableIllegalArgumentException("The value cannot be null.");
}
if (userRequest.getKey() == null || validKey.equals(userRequest.getKey())) {
identityService.setUserInfo(user.getId(), key, userRequest.getValue());
} else {
throw new FlowableIllegalArgumentException("Key provided in request body does not match the key in the resource URL.");
}
return restResponseFactory.createUserInfoResponse(key, userRequest.getValue(), user.getId());
} | @Test
public void testGetUserInfoCollection() throws Exception {
User savedUser = null;
try {
User newUser = identityService.newUser("testuser");
newUser.setFirstName("Fred");
newUser.setLastName("McDonald");
newUser.setEmail("[email protected]");
identityService.saveUser(newUser);
savedUser = newUser;
identityService.setUserInfo(newUser.getId(), "key1", "Value 1");
identityService.setUserInfo(newUser.getId(), "key2", "Value 2");
CloseableHttpResponse response = executeRequest(
new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_INFO_COLLECTION, newUser.getId())), HttpStatus.SC_OK);
JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
closeResponse(response);
assertThat(responseNode).isNotNull();
assertThat(responseNode.isArray()).isTrue();
assertThatJson(responseNode)
.isEqualTo("[ {"
+ " key: 'key1',"
+ " url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_INFO, newUser.getId(), "key1") + "'"
+ "},"
+ "{"
+ " key: 'key2',"
+ " url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_INFO, newUser.getId(), "key2") + "'"
+ "}"
+ "]");
} finally {
// Delete user after test passes or fails
if (savedUser != null) {
identityService.deleteUser(savedUser.getId());
}
}
} |
public RowExpression extract(PlanNode node)
{
return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null);
} | @Test
public void testInnerJoinWithFalseFilter()
{
Map<VariableReferenceExpression, ColumnHandle> leftAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(AV, BV, CV)));
TableScanNode leftScan = tableScanNode(leftAssignments);
Map<VariableReferenceExpression, ColumnHandle> rightAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(DV, EV, FV)));
TableScanNode rightScan = tableScanNode(rightAssignments);
PlanNode node = new JoinNode(
Optional.empty(),
newId(),
JoinType.INNER,
leftScan,
rightScan,
ImmutableList.of(new EquiJoinClause(AV, DV)),
ImmutableList.<VariableReferenceExpression>builder()
.addAll(leftScan.getOutputVariables())
.addAll(rightScan.getOutputVariables())
.build(),
Optional.of(FALSE_CONSTANT),
Optional.empty(),
Optional.empty(),
Optional.empty(),
ImmutableMap.of());
RowExpression effectivePredicate = effectivePredicateExtractor.extract(node);
assertEquals(effectivePredicate, FALSE_CONSTANT);
} |
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
if (args.size() < 2) {
printInfo(err);
return 1;
}
int index = 0;
String input = args.get(index);
String option = "all";
if ("-o".equals(input)) {
option = args.get(1);
index += 2;
}
if (!OPTIONS.contains(option) || (args.size() - index < 1)) {
printInfo(err);
return 1;
}
input = args.get(index++);
if (!REPORT.equals(option)) {
if (args.size() - index < 1) {
printInfo(err);
return 1;
}
}
if (ALL.equals(option)) {
return recoverAll(input, args.get(index), out, err);
} else if (PRIOR.equals(option)) {
return recoverPrior(input, args.get(index), out, err);
} else if (AFTER.equals(option)) {
return recoverAfter(input, args.get(index), out, err);
} else if (REPORT.equals(option)) {
return reportOnly(input, out, err);
} else {
return 1;
}
} | @Test
void repairAllCorruptRecord() throws Exception {
String output = run(new DataFileRepairTool(), "-o", "all", corruptRecordFile.getPath(), repairedFile.getPath());
assertTrue(output.contains("Number of blocks: 3 Number of corrupt blocks: 1"), output);
assertTrue(output.contains("Number of records: 8 Number of corrupt records: 2"), output);
checkFileContains(repairedFile, "apple", "banana", "celery", "date", "guava", "hazelnut");
} |
public Output run(RunContext runContext) throws Exception {
Logger logger = runContext.logger();
URI from = new URI(runContext.render(this.uri));
File tempFile = runContext.workingDir().createTempFile(filenameFromURI(from)).toFile();
// output
Output.OutputBuilder builder = Output.builder();
// do it
try (
ReactorStreamingHttpClient client = this.streamingClient(runContext, this.method);
BufferedOutputStream output = new BufferedOutputStream(new FileOutputStream(tempFile));
) {
@SuppressWarnings("unchecked")
HttpRequest<String> request = this.request(runContext);
Long size = client
.exchangeStream(request)
.map(throwFunction(response -> {
if (builder.code == null) {
builder
.code(response.code())
.headers(response.getHeaders().asMap());
}
if (response.getBody().isPresent()) {
byte[] bytes = response.getBody().get().toByteArray();
output.write(bytes);
return (long) bytes.length;
} else {
return 0L;
}
}))
.reduce(Long::sum)
.block();
if (size == null) {
size = 0L;
}
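            // If the server reported a Content-Length, verify it matches the number of bytes actually written.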
if (builder.headers != null && builder.headers.containsKey("Content-Length")) {
long length = Long.parseLong(builder.headers.get("Content-Length").getFirst());
if (length != size) {
throw new IllegalStateException("Invalid size, got " + size + ", expected " + length);
}
}
output.flush();
runContext.metric(Counter.of("response.length", size, this.tags(request, null)));
builder.length(size);
if (size == 0) {
if (this.failOnEmptyResponse) {
throw new HttpClientResponseException("No response from server", HttpResponse.status(HttpStatus.SERVICE_UNAVAILABLE));
} else {
logger.warn("File '{}' is empty", from);
}
}
String filename = null;
if (builder.headers != null && builder.headers.containsKey("Content-Disposition")) {
String contentDisposition = builder.headers.get("Content-Disposition").getFirst();
filename = filenameFromHeader(runContext, contentDisposition);
}
builder.uri(runContext.storage().putFile(tempFile, filename));
logger.debug("File '{}' downloaded to '{}'", from, builder.uri);
return builder.build();
}
} | @Test
void noResponse() {
EmbeddedServer embeddedServer = applicationContext.getBean(EmbeddedServer.class);
embeddedServer.start();
Download task = Download.builder()
.id(DownloadTest.class.getSimpleName())
.type(DownloadTest.class.getName())
.uri(embeddedServer.getURI() + "/204")
.build();
RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, task, ImmutableMap.of());
HttpClientResponseException exception = assertThrows(
HttpClientResponseException.class,
() -> task.run(runContext)
);
assertThat(exception.getMessage(), is("No response from server"));
} |
@Override
public ProducerImpl.OpSendMsg createOpSendMsg() {
throw new UnsupportedOperationException();
} | @Test(expectedExceptions = UnsupportedOperationException.class)
public void testCreateOpSendMsg() {
RawBatchMessageContainerImpl container = new RawBatchMessageContainerImpl();
container.createOpSendMsg();
} |
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);
for (String feature : FEATURES_IN_ORDER)
{
if (!gsubData.isFeatureSupported(feature))
{
if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE))
{
// Create your own rkrf feature from vatu feature
intermediateGlyphsFromGsub = applyRKRFFeature(
gsubData.getFeature(VATU_FEATURE),
intermediateGlyphsFromGsub);
}
LOG.debug("the feature {} was not found", feature);
continue;
}
LOG.debug("applying the feature {}", feature);
ScriptFeature scriptFeature = gsubData.getFeature(feature);
intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature,
intermediateGlyphsFromGsub);
}
return Collections.unmodifiableList(intermediateGlyphsFromGsub);
} | @Test
void testApplyTransforms_abvs()
{
// given
List<Integer> glyphsAfterGsub = Arrays.asList(92,255,92,258,91,102,336);
// when
List<Integer> result = gsubWorkerForGujarati.applyTransforms(getGlyphIds("રેંરૈંર્યાં"));
// then
assertEquals(glyphsAfterGsub, result);
} |
public boolean eval(ContentFile<?> file) {
// TODO: detect the case where a column is missing from the file using file's max field id.
return new MetricsEvalVisitor().eval(file);
} | @Test
public void testNoNulls() {
boolean shouldRead = new StrictMetricsEvaluator(SCHEMA, isNull("all_nulls")).eval(FILE);
assertThat(shouldRead).as("Should match: all values are null").isTrue();
shouldRead = new StrictMetricsEvaluator(SCHEMA, isNull("some_nulls")).eval(FILE);
assertThat(shouldRead).as("Should not match: not all values are null").isFalse();
shouldRead = new StrictMetricsEvaluator(SCHEMA, isNull("no_nulls")).eval(FILE);
assertThat(shouldRead).as("Should not match: no values are null").isFalse();
} |
@Override
public SessionStore<K, V> build() {
return new MeteredSessionStore<>(
maybeWrapCaching(maybeWrapLogging(storeSupplier.get())),
storeSupplier.metricsScope(),
keySerde,
valueSerde,
time);
} | @Test
public void shouldHaveCachingStoreWhenEnabled() {
setUp();
final SessionStore<String, String> store = builder.withCachingEnabled().build();
final StateStore wrapped = ((WrappedStateStore) store).wrapped();
assertThat(store, instanceOf(MeteredSessionStore.class));
assertThat(wrapped, instanceOf(CachingSessionStore.class));
} |
@Override
public boolean containsChar(K name, char value) {
return false;
} | @Test
public void testContainsChar() {
assertFalse(HEADERS.containsChar("name1", 'x'));
} |
@Override
public SelLong field(SelString field) {
String fieldName = field.getInternalVal();
if ("SUNDAY".equals(fieldName)) {
return SelLong.of(DateTimeConstants.SUNDAY);
}
throw new UnsupportedOperationException(type() + " DO NOT support accessing field: " + field);
} | @Test
public void testSundayField() {
SelLong res = SelMiscFunc.INSTANCE.field(SelString.of("SUNDAY"));
assertEquals(7, res.longVal());
} |
@Override
public boolean dropTable(TableIdentifier identifier, boolean purge) {
TableOperations ops = newTableOps(identifier);
TableMetadata lastMetadata = null;
if (purge) {
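      // Capture the current metadata before the catalog row is deleted, so table files can still be purged afterwards.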
try {
lastMetadata = ops.current();
} catch (NotFoundException e) {
LOG.warn(
"Failed to load table metadata for table: {}, continuing drop without purge",
identifier,
e);
}
}
int deletedRecords =
execute(
(schemaVersion == JdbcUtil.SchemaVersion.V1)
? JdbcUtil.V1_DROP_TABLE_SQL
: JdbcUtil.V0_DROP_TABLE_SQL,
catalogName,
JdbcUtil.namespaceToString(identifier.namespace()),
identifier.name());
if (deletedRecords == 0) {
LOG.info("Skipping drop, table does not exist: {}", identifier);
return false;
}
if (purge && lastMetadata != null) {
CatalogUtil.dropTableData(ops.io(), lastMetadata);
}
LOG.info("Dropped table: {}", identifier);
return true;
} | @Test
public void testDropTable() {
TableIdentifier testTable = TableIdentifier.of("db", "ns1", "ns2", "tbl");
TableIdentifier testTable2 = TableIdentifier.of("db", "ns1", "ns2", "tbl2");
catalog.createTable(testTable, SCHEMA, PartitionSpec.unpartitioned());
catalog.createTable(testTable2, SCHEMA, PartitionSpec.unpartitioned());
catalog.dropTable(testTable);
assertThat(catalog.listTables(testTable.namespace())).doesNotContain(testTable);
catalog.dropTable(testTable2);
assertThatThrownBy(() -> catalog.listTables(testTable2.namespace()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.ns1.ns2");
assertThat(catalog.dropTable(TableIdentifier.of("db", "tbl-not-exists"))).isFalse();
} |
public PaginationContext createPaginationContext(final Collection<ExpressionSegment> expressions, final ProjectionsContext projectionsContext, final List<Object> params) {
Optional<String> rowNumberAlias = findRowNumberAlias(projectionsContext);
if (!rowNumberAlias.isPresent()) {
return new PaginationContext(null, null, params);
}
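        // Flatten all expressions into AND predicates, then keep only those that compare against the row-number alias.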
Collection<AndPredicate> andPredicates = expressions.stream().flatMap(each -> ExpressionExtractUtils.getAndPredicates(each).stream()).collect(Collectors.toList());
Collection<BinaryOperationExpression> rowNumberPredicates = getRowNumberPredicates(andPredicates, rowNumberAlias.get());
return rowNumberPredicates.isEmpty() ? new PaginationContext(null, null, params) : createPaginationWithRowNumber(rowNumberPredicates, params);
} | @Test
void assertCreatePaginationContextWhenRowNumberAliasNotPresent() {
ProjectionsContext projectionsContext = new ProjectionsContext(0, 0, false, Collections.emptyList());
PaginationContext paginationContext =
new RowNumberPaginationContextEngine(new OracleDatabaseType()).createPaginationContext(Collections.emptyList(), projectionsContext, Collections.emptyList());
assertFalse(paginationContext.getOffsetSegment().isPresent());
assertFalse(paginationContext.getRowCountSegment().isPresent());
} |
@Override
public void setConfigAttributes(Object attributes) {
clear();
if (attributes == null) {
return;
}
List<Map> attrList = (List<Map>) attributes;
for (Map attrMap : attrList) {
String type = (String) attrMap.get("artifactTypeValue");
if (TestArtifactConfig.TEST_PLAN_DISPLAY_NAME.equals(type) || BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME.equals(type)) {
String source = (String) attrMap.get(BuiltinArtifactConfig.SRC);
String destination = (String) attrMap.get(BuiltinArtifactConfig.DEST);
if (source.trim().isEmpty() && destination.trim().isEmpty()) {
continue;
}
if (TestArtifactConfig.TEST_PLAN_DISPLAY_NAME.equals(type)) {
this.add(new TestArtifactConfig(source, destination));
} else {
this.add(new BuildArtifactConfig(source, destination));
}
} else {
String artifactId = (String) attrMap.get(PluggableArtifactConfig.ID);
String storeId = (String) attrMap.get(PluggableArtifactConfig.STORE_ID);
String pluginId = (String) attrMap.get("pluginId");
Map<String, Object> userSpecifiedConfiguration = (Map<String, Object>) attrMap.get("configuration");
PluggableArtifactConfig pluggableArtifactConfig = new PluggableArtifactConfig(artifactId, storeId);
this.add(pluggableArtifactConfig);
if (userSpecifiedConfiguration == null) {
return;
}
if (StringUtils.isBlank(pluginId)) {
Configuration configuration = pluggableArtifactConfig.getConfiguration();
for (String key : userSpecifiedConfiguration.keySet()) {
Map<String, String> configurationMetadata = (Map<String, String>) userSpecifiedConfiguration.get(key);
if (configurationMetadata != null) {
boolean isSecure = Boolean.parseBoolean(configurationMetadata.get("isSecure"));
if (configuration.getProperty(key) == null) {
configuration.addNewConfiguration(key, isSecure);
}
if (isSecure) {
configuration.getProperty(key).setEncryptedValue(new EncryptedConfigurationValue(configurationMetadata.get("value")));
} else {
configuration.getProperty(key).setConfigurationValue(new ConfigurationValue(configurationMetadata.get("value")));
}
}
}
} else {
for (Map.Entry<String, Object> configuration : userSpecifiedConfiguration.entrySet()) {
pluggableArtifactConfig.getConfiguration().addNewConfigurationWithValue(configuration.getKey(), String.valueOf(configuration.getValue()), false);
}
}
}
}
} | @Test
public void setConfigAttributes_shouldSetExternalArtifactWithPlainTextValuesIfPluginIdIsProvided() {
ArtifactPluginInfo artifactPluginInfo = mock(ArtifactPluginInfo.class);
PluginDescriptor pluginDescriptor = mock(PluginDescriptor.class);
when(artifactPluginInfo.getDescriptor()).thenReturn(pluginDescriptor);
when(pluginDescriptor.id()).thenReturn("cd.go.artifact.foo");
PluginConfiguration image = new PluginConfiguration("Image", new Metadata(true, true));
PluginConfiguration tag = new PluginConfiguration("Tag", new Metadata(true, false));
ArrayList<PluginConfiguration> pluginMetadata = new ArrayList<>();
pluginMetadata.add(image);
pluginMetadata.add(tag);
when(artifactPluginInfo.getArtifactConfigSettings()).thenReturn(new PluggableInstanceSettings(pluginMetadata));
ArtifactMetadataStore.instance().setPluginInfo(artifactPluginInfo);
Map<Object, Object> configurationMap1 = new HashMap<>();
configurationMap1.put("Image", "gocd/gocd-server");
configurationMap1.put("Tag", "v18.6.0");
HashMap<String, Object> artifactPlan1 = new HashMap<>();
artifactPlan1.put("artifactTypeValue", "Pluggable Artifact");
artifactPlan1.put("id", "artifactId");
artifactPlan1.put("storeId", "storeId");
artifactPlan1.put("pluginId", "cd.go.artifact.foo");
artifactPlan1.put("configuration", configurationMap1);
List<Map> artifactPlansList = new ArrayList<>();
artifactPlansList.add(artifactPlan1);
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.setConfigAttributes(artifactPlansList);
assertThat(artifactTypeConfigs.size(), is(1));
PluggableArtifactConfig artifactConfig = (PluggableArtifactConfig) artifactTypeConfigs.get(0);
assertThat(artifactConfig.getArtifactType(), is(ArtifactType.external));
assertThat(artifactConfig.getId(), is("artifactId"));
assertThat(artifactConfig.getStoreId(), is("storeId"));
assertThat(artifactConfig.getConfiguration().getProperty("Image").isSecure(), is(false));
} |
static void validateBundlingRelatedOptions(SamzaPipelineOptions pipelineOptions) {
if (pipelineOptions.getMaxBundleSize() > 1) {
final Map<String, String> configs =
pipelineOptions.getConfigOverride() == null
? new HashMap<>()
: pipelineOptions.getConfigOverride();
final JobConfig jobConfig = new JobConfig(new MapConfig(configs));
      // Validate that the thread pool size is not overridden in code
checkArgument(
jobConfig.getThreadPoolSize() <= 1,
JOB_CONTAINER_THREAD_POOL_SIZE
+ " config should be replaced with SamzaPipelineOptions.numThreadsForProcessElement");
}
} | @Test(expected = IllegalArgumentException.class)
public void testBundleEnabledInMultiThreadedModeThrowsException() {
SamzaPipelineOptions mockOptions = mock(SamzaPipelineOptions.class);
Map<String, String> config = ImmutableMap.of(JOB_CONTAINER_THREAD_POOL_SIZE, "10");
when(mockOptions.getMaxBundleSize()).thenReturn(2L);
when(mockOptions.getConfigOverride()).thenReturn(config);
validateBundlingRelatedOptions(mockOptions);
} |
public Optional<ClusterResources> resources() {
return resources;
} | @Test
public void test_resize() {
var min = new ClusterResources(7, 1, new NodeResources( 2, 10, 384, 1));
var now = new ClusterResources(7, 1, new NodeResources( 3.4, 16.2, 450.1, 1));
var max = new ClusterResources(7, 1, new NodeResources( 4, 32, 768, 1));
var fixture = DynamicProvisioningTester.fixture()
.awsProdSetup(true)
.capacity(Capacity.from(min, max))
.initialResources(Optional.of(now))
.build();
var initialNodes = fixture.nodes().asList();
fixture.tester().clock().advance(Duration.ofDays(2));
fixture.loader().applyLoad(new Load(0.06, 0.52, 0.27, 0, 0), 100);
var autoscaling = fixture.autoscale();
fixture.tester().assertResources("Scaling down",
7, 1, 2, 15.8, 384.0,
autoscaling);
fixture.deploy(Capacity.from(autoscaling.resources().get()));
assertEquals("Initial nodes are kept", initialNodes, fixture.nodes().asList());
} |
public static Builder newBuilder() {
return new Builder();
} | @Test
public void testBuilderThrowsExceptionWhenParentTokenMissing() {
assertThrows(
"parentToken",
IllegalStateException.class,
() ->
PartitionMetadata.newBuilder()
.setPartitionToken(PARTITION_TOKEN)
.setStartTimestamp(START_TIMESTAMP)
.setEndTimestamp(END_TIMESTAMP)
.setHeartbeatMillis(10)
.setState(State.CREATED)
.setWatermark(WATERMARK)
.setCreatedAt(CREATED_AT)
.build());
} |
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
final ConfiguredStatement<T> statement
) {
try {
if (statement.getStatement() instanceof CreateAsSelect) {
registerForCreateAs((ConfiguredStatement<? extends CreateAsSelect>) statement);
} else if (statement.getStatement() instanceof CreateSource) {
registerForCreateSource((ConfiguredStatement<? extends CreateSource>) statement);
}
} catch (final KsqlStatementException e) {
throw e;
} catch (final KsqlException e) {
throw new KsqlStatementException(
ErrorMessageUtil.buildErrorMessage(e),
statement.getMaskedStatementText(),
e.getCause());
}
// Remove schema id from SessionConfig
return stripSchemaIdConfig(statement);
} | @Test
public void shouldRegisterKeyOverrideSchemaAvroForCreateAs()
throws IOException, RestClientException {
// Given:
when(schemaRegistryClient.register(anyString(), any(ParsedSchema.class))).thenReturn(1);
final SchemaAndId keySchemaAndId = SchemaAndId.schemaAndId(SCHEMA.value(), AVRO_SCHEMA, 1);
final SchemaAndId valueSchemaAndId = SchemaAndId.schemaAndId(SCHEMA.value(), AVRO_SCHEMA, 1);
givenStatement("CREATE STREAM sink WITH (key_schema_id=1, value_schema_id=1, partitions=1"
+ ") AS SELECT * FROM SOURCE;", Pair.of(keySchemaAndId, valueSchemaAndId));
// When:
injector.inject(statement);
// Then:
verify(schemaRegistryClient).register("SINK-key", AVRO_SCHEMA);
verify(schemaRegistryClient).register("SINK-value", AVRO_SCHEMA);
} |