focal_method | test_case |
---|---|
@Override
protected void write(final MySQLPacketPayload payload) {
for (Object each : data) {
if (null == each) {
payload.writeInt1(NULL);
continue;
}
writeDataIntoPayload(payload, each);
}
} | @Test
void assertLocalDateTime() {
String localDateTimeStr = "2021-08-23T17:30:30";
LocalDateTime dateTime = LocalDateTime.parse(localDateTimeStr, DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss"));
MySQLTextResultSetRowPacket actual = new MySQLTextResultSetRowPacket(Collections.singletonList(dateTime));
actual.write(payload);
verify(payload).writeStringLenenc(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").format(LocalDateTime.parse(localDateTimeStr, DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss"))));
} |
public static UpdateRequirement fromJson(String json) {
return JsonUtil.parse(json, UpdateRequirementParser::fromJson);
} | @Test
public void testAssertTableDoesNotExistFromJson() {
String requirementType = UpdateRequirementParser.ASSERT_TABLE_DOES_NOT_EXIST;
String json = "{\"type\":\"assert-create\"}";
UpdateRequirement expected = new UpdateRequirement.AssertTableDoesNotExist();
assertEquals(requirementType, expected, UpdateRequirementParser.fromJson(json));
} |
Subscription addSubscription(final String channel, final int streamId)
{
return addSubscription(channel, streamId, defaultAvailableImageHandler, defaultUnavailableImageHandler);
} | @Test
void addSubscriptionShouldNotifyMediaDriver()
{
whenReceiveBroadcastOnMessage(
ControlProtocolEvents.ON_SUBSCRIPTION_READY,
subscriptionReadyBuffer,
(buffer) ->
{
subscriptionReady.correlationId(CORRELATION_ID);
return SubscriptionReadyFlyweight.LENGTH;
});
conductor.addSubscription(CHANNEL, STREAM_ID_1);
verify(driverProxy).addSubscription(CHANNEL, STREAM_ID_1);
} |
@Override
public CompletableFuture<RemovedTaskResult> remove(final TaskId taskId) {
final CompletableFuture<RemovedTaskResult> future = new CompletableFuture<>();
tasksAndActionsLock.lock();
try {
tasksAndActions.add(TaskAndAction.createRemoveTask(taskId, future));
tasksAndActionsCondition.signalAll();
} finally {
tasksAndActionsLock.unlock();
}
return future;
} | @Test
public void shouldThrowIfRemovingUpdatingActiveTaskFailsWithStreamsException() throws Exception {
final StreamTask task = statefulTask(TASK_0_0, mkSet(TOPIC_PARTITION_A_0)).inState(State.RESTORING).build();
final StreamsException streamsException = new StreamsException("Something happened", task.id());
setupShouldThrowIfRemovingUpdatingStatefulTaskFailsWithException(task, streamsException);
final CompletableFuture<StateUpdater.RemovedTaskResult> future = stateUpdater.remove(task.id());
verifyRemovingUpdatingStatefulTaskFails(future, task, streamsException, true);
} |
@Override
public T next() {
if (this.next != null || hasNext()) {
final T out = this.next;
this.next = null;
return out;
} else {
throw new NoSuchElementException();
}
} | @Test
void testNext() {
try {
// create the resettable Iterator
SpillingResettableIterator<IntValue> iterator =
new SpillingResettableIterator<IntValue>(
this.reader,
this.serializer,
this.memman,
this.ioman,
2,
this.memOwner);
// open the iterator
iterator.open();
IntValue record;
int cnt = 0;
while (cnt < NUM_TESTRECORDS) {
record = iterator.next();
assertThat(record).withFailMessage("Record was not read from iterator").isNotNull();
cnt++;
}
assertThatThrownBy(iterator::next)
.withFailMessage("Too many records were read from iterator.")
.isInstanceOf(NoSuchElementException.class);
iterator.close();
} catch (Exception ex) {
ex.printStackTrace();
fail("Test encountered an exception.");
}
} |
public byte[] encode(String val, String delimiters) {
return codecs[0].encode(val);
} | @Test
public void testEncodeChinesePersonNameUTF8() {
assertArrayEquals(CHINESE_PERSON_NAME_UTF8_BYTES,
utf8().encode(CHINESE_PERSON_NAME_UTF8, PN_DELIMS));
} |
public byte[] getBytes() {
return value.toByteArray();
} | @Test
public void testGetBytes() {
assertArrayEquals("[] equal after getBytes", new byte[] {}, ByteKey.EMPTY.getBytes());
assertArrayEquals("[00] equal after getBytes", new byte[] {0x00}, ByteKey.of(0x00).getBytes());
} |
public static String expandIP(String netAddress, int part) {
netAddress = netAddress.toUpperCase();
// expand netAddress
int separatorCount = StringUtils.countMatches(netAddress, ":");
int padCount = part - separatorCount;
if (padCount > 0) {
StringBuilder padStr = new StringBuilder(":");
for (int i = 0; i < padCount; i++) {
padStr.append(":");
}
netAddress = StringUtils.replace(netAddress, "::", padStr.toString());
}
// pad netAddress
String[] strArray = StringUtils.splitPreserveAllTokens(netAddress, ":");
for (int i = 0; i < strArray.length; i++) {
if (strArray[i].length() < 4) {
strArray[i] = StringUtils.leftPad(strArray[i], 4, '0');
}
}
// output
StringBuilder sb = new StringBuilder();
for (int i = 0; i < strArray.length; i++) {
sb.append(strArray[i]);
if (i != strArray.length - 1) {
sb.append(":");
}
}
return sb.toString();
} | @Test
public void testExpandIP() {
Assert.assertEquals(AclUtils.expandIP("::", 8), "0000:0000:0000:0000:0000:0000:0000:0000");
Assert.assertEquals(AclUtils.expandIP("::1", 8), "0000:0000:0000:0000:0000:0000:0000:0001");
Assert.assertEquals(AclUtils.expandIP("3::", 8), "0003:0000:0000:0000:0000:0000:0000:0000");
Assert.assertEquals(AclUtils.expandIP("2::2", 8), "0002:0000:0000:0000:0000:0000:0000:0002");
Assert.assertEquals(AclUtils.expandIP("4::aac4:92", 8), "0004:0000:0000:0000:0000:0000:AAC4:0092");
Assert.assertEquals(AclUtils.expandIP("ab23:56:901a::cc6:765:bb:9011", 8), "AB23:0056:901A:0000:0CC6:0765:00BB:9011");
Assert.assertEquals(AclUtils.expandIP("ab23:56:901a:1:cc6:765:bb:9011", 8), "AB23:0056:901A:0001:0CC6:0765:00BB:9011");
Assert.assertEquals(AclUtils.expandIP("5::7:6", 6), "0005:0000:0000:0000:0007:0006");
} |
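As a hand-traced sketch of the expand-and-pad logic above (derived from the code itself, not from RocketMQ docs), the `5::7:6` case from the test works out like this:

```java
// Hand-checked trace of expandIP("5::7:6", 6): padCount = 6 - 3 = 3,
// so "::" grows to "::::"; empty tokens are preserved and left-padded.
public class ExpandTrace {
    public static void main(String[] args) {
        String in = "5::7:6";
        String expanded = in.replace("::", "::::");  // "5::::7:6"
        String[] groups = expanded.split(":", -1);   // ["5","","","","7","6"]
        StringBuilder out = new StringBuilder();
        for (int i = 0; i < groups.length; i++) {
            out.append(String.format("%4s", groups[i]).replace(' ', '0'));
            if (i < groups.length - 1) out.append(':');
        }
        System.out.println(out); // 0005:0000:0000:0000:0007:0006
    }
}
```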
@Override
public Optional<Map<String, EncryptionInformation>> getReadEncryptionInformation(
ConnectorSession session,
Table table,
Optional<Set<HiveColumnHandle>> requestedColumns,
Map<String, Partition> partitions)
{
Optional<DwrfTableEncryptionProperties> encryptionProperties = getTableEncryptionProperties(table);
if (!encryptionProperties.isPresent()) {
return Optional.empty();
}
Optional<Map<String, String>> fieldToKeyReference = getFieldToKeyReference(encryptionProperties.get(), requestedColumns);
if (!fieldToKeyReference.isPresent()) {
return Optional.empty();
}
return Optional.of(getReadEncryptionInformationInternal(session, table, requestedColumns, partitions, fieldToKeyReference.get(), encryptionProperties.get()));
} | @Test
public void testGetReadEncryptionInformationForPartitionedTableWithColumnLevelEncryption()
{
Table table = createTable(DWRF, Optional.of(forPerColumn(fromHiveProperty("key1:col_string,col_struct.b.b2;key2:col_bigint,col_struct.a"), "algo", "provider")), true);
Optional<Map<String, EncryptionInformation>> encryptionInformation = encryptionInformationSource.getReadEncryptionInformation(
SESSION,
table,
Optional.of(ImmutableSet.of(
// hiveColumnIndex value does not matter in this test
new HiveColumnHandle("col_bigint", HIVE_LONG, HIVE_LONG.getTypeSignature(), 0, REGULAR, Optional.empty(), Optional.empty()),
new HiveColumnHandle("col_map", HIVE_LONG, HIVE_LONG.getTypeSignature(), 0, REGULAR, Optional.empty(), Optional.empty()),
new HiveColumnHandle(
"col_struct",
STRUCT_TYPE,
STRUCT_TYPE.getTypeSignature(),
0,
REGULAR,
Optional.empty(),
ImmutableList.of(new Subfield("col_struct.a"), new Subfield("col_struct.b.b2")),
Optional.empty()))),
ImmutableMap.of(
"ds=2020-01-01", new Partition("dbName", "tableName", ImmutableList.of("2020-01-01"), table.getStorage(), table.getDataColumns(), ImmutableMap.of(), Optional.empty(), false, true, 0, 0, Optional.empty()),
"ds=2020-01-02", new Partition("dbName", "tableName", ImmutableList.of("2020-01-02"), table.getStorage(), table.getDataColumns(), ImmutableMap.of(), Optional.empty(), false, true, 0, 0, Optional.empty())));
Map<String, byte[]> expectedFieldToKeyData = ImmutableMap.of("col_bigint", "key2".getBytes(), "col_struct.a", "key2".getBytes(), "col_struct.b.b2", "key1".getBytes());
assertTrue(encryptionInformation.isPresent());
assertEquals(
encryptionInformation.get(),
ImmutableMap.of(
"ds=2020-01-01", EncryptionInformation.fromEncryptionMetadata(DwrfEncryptionMetadata.forPerField(expectedFieldToKeyData, ImmutableMap.of(TEST_EXTRA_METADATA, "ds=2020-01-01"), "algo", "provider")),
"ds=2020-01-02", EncryptionInformation.fromEncryptionMetadata(DwrfEncryptionMetadata.forPerField(expectedFieldToKeyData, ImmutableMap.of(TEST_EXTRA_METADATA, "ds=2020-01-02"), "algo", "provider"))));
} |
@Override
public byte[] encode(ILoggingEvent event) {
var baos = new ByteArrayOutputStream();
try (var generator = jsonFactory.createGenerator(baos)) {
generator.writeStartObject();
// https://cloud.google.com/logging/docs/structured-logging#structured_logging_special_fields
// https://github.com/googleapis/java-logging-logback/blob/main/src/main/java/com/google/cloud/logging/logback/LoggingAppender.java
writeTimestamp(generator, event);
writeSeverity(generator, event);
writeLogger(generator, event);
writeMessage(generator, event);
writeThread(generator, event);
writeServiceContext(generator);
writeTraceContext(generator);
var mdc = event.getMDCPropertyMap();
writeMdc(generator, mdc);
writeKeyValue(generator, event);
if ("ERROR".equals(event.getLevel().toString())) {
writeError(generator, event, mdc);
}
writeStackTrace(generator, event);
generator.writeEndObject();
generator.writeRaw('\n');
generator.flush();
} catch (NullPointerException | IOException e) {
return logFallbackError(event, e);
}
return baos.toByteArray();
} | @Test
void encode_mdc() {
var e = mockEvent();
when(e.getLevel()).thenReturn(Level.DEBUG);
when(e.getFormattedMessage()).thenReturn("oha, sup?");
when(e.getMDCPropertyMap())
.thenReturn(
Map.of(
"traceId", "k398cidkekk",
"spanId", "499910"));
var msg = encoder.encode(e);
assertMatchesJson(
"""
{"logger":"com.example.MyLogger","mdc":{"spanId":"499910","traceId":"k398cidkekk"},"message":"oha, sup?","severity":"DEBUG","thread_name":"main","time":"2024-08-09T14:13:33Z"}
""",
msg);
} |
@Override
public void deleteDataSourceConfig(Long id) {
// validate that the config exists
validateDataSourceConfigExists(id);
// delete it
dataSourceConfigMapper.deleteById(id);
} | @Test
public void testDeleteDataSourceConfig_notExists() {
// prepare parameters
Long id = randomLongId();
// invoke and assert the expected exception
assertServiceException(() -> dataSourceConfigService.deleteDataSourceConfig(id), DATA_SOURCE_CONFIG_NOT_EXISTS);
} |
public <T> List<CompletableFuture<T>> scheduleWriteAllOperation(
String name,
Duration timeout,
CoordinatorWriteOperation<S, T, U> op
) {
throwIfNotRunning();
log.debug("Scheduled execution of write all operation {}.", name);
return coordinators
.keySet()
.stream()
.map(tp -> scheduleWriteOperation(name, tp, timeout, op))
.collect(Collectors.toList());
} | @Test
public void testScheduleWriteAllOperation() throws ExecutionException, InterruptedException, TimeoutException {
MockTimer timer = new MockTimer();
MockPartitionWriter writer = new MockPartitionWriter();
CoordinatorRuntime<MockCoordinatorShard, String> runtime =
new CoordinatorRuntime.Builder<MockCoordinatorShard, String>()
.withTime(timer.time())
.withTimer(timer)
.withDefaultWriteTimeOut(DEFAULT_WRITE_TIMEOUT)
.withLoader(new MockCoordinatorLoader())
.withEventProcessor(new DirectEventProcessor())
.withPartitionWriter(writer)
.withCoordinatorShardBuilderSupplier(new MockCoordinatorShardBuilderSupplier())
.withCoordinatorRuntimeMetrics(mock(CoordinatorRuntimeMetrics.class))
.withCoordinatorMetrics(mock(CoordinatorMetrics.class))
.withSerializer(new StringSerializer())
.build();
TopicPartition coordinator0 = new TopicPartition("__consumer_offsets", 0);
TopicPartition coordinator1 = new TopicPartition("__consumer_offsets", 1);
TopicPartition coordinator2 = new TopicPartition("__consumer_offsets", 2);
// Load coordinators.
runtime.scheduleLoadOperation(coordinator0, 10);
runtime.scheduleLoadOperation(coordinator1, 10);
runtime.scheduleLoadOperation(coordinator2, 10);
// Writes.
AtomicInteger cnt = new AtomicInteger(0);
List<CompletableFuture<List<String>>> writes = runtime.scheduleWriteAllOperation("write", DEFAULT_WRITE_TIMEOUT, state -> {
int counter = cnt.getAndIncrement();
return new CoordinatorResult<>(
Collections.singletonList("record#" + counter),
Collections.singletonList("response#" + counter)
);
});
assertEquals(1L, runtime.contextOrThrow(coordinator0).coordinator.lastWrittenOffset());
assertEquals(1L, runtime.contextOrThrow(coordinator1).coordinator.lastWrittenOffset());
assertEquals(1L, runtime.contextOrThrow(coordinator2).coordinator.lastWrittenOffset());
assertEquals(Collections.singletonList(records(timer.time().milliseconds(), "record#0")), writer.entries(coordinator0));
assertEquals(Collections.singletonList(records(timer.time().milliseconds(), "record#1")), writer.entries(coordinator1));
assertEquals(Collections.singletonList(records(timer.time().milliseconds(), "record#2")), writer.entries(coordinator2));
// Commit.
writer.commit(coordinator0);
writer.commit(coordinator1);
writer.commit(coordinator2);
// Verify.
assertEquals(
Arrays.asList("response#0", "response#1", "response#2"),
FutureUtils.combineFutures(writes, ArrayList::new, List::addAll).get(5, TimeUnit.SECONDS)
);
} |
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.sourceType(typeDefine.getColumnType())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String pgDataType = typeDefine.getDataType().toLowerCase();
switch (pgDataType) {
case PG_BOOLEAN:
builder.dataType(BasicType.BOOLEAN_TYPE);
break;
case PG_BOOLEAN_ARRAY:
builder.dataType(ArrayType.BOOLEAN_ARRAY_TYPE);
break;
case PG_SMALLSERIAL:
case PG_SMALLINT:
builder.dataType(BasicType.SHORT_TYPE);
break;
case PG_SMALLINT_ARRAY:
builder.dataType(ArrayType.SHORT_ARRAY_TYPE);
break;
case PG_INTEGER:
case PG_SERIAL:
builder.dataType(BasicType.INT_TYPE);
break;
case PG_INTEGER_ARRAY:
builder.dataType(ArrayType.INT_ARRAY_TYPE);
break;
case PG_BIGINT:
case PG_BIGSERIAL:
builder.dataType(BasicType.LONG_TYPE);
break;
case PG_BIGINT_ARRAY:
builder.dataType(ArrayType.LONG_ARRAY_TYPE);
break;
case PG_REAL:
builder.dataType(BasicType.FLOAT_TYPE);
break;
case PG_REAL_ARRAY:
builder.dataType(ArrayType.FLOAT_ARRAY_TYPE);
break;
case PG_DOUBLE_PRECISION:
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case PG_DOUBLE_PRECISION_ARRAY:
builder.dataType(ArrayType.DOUBLE_ARRAY_TYPE);
break;
case PG_NUMERIC:
DecimalType decimalType;
if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
decimalType =
new DecimalType(
typeDefine.getPrecision().intValue(), typeDefine.getScale());
} else {
decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
}
builder.dataType(decimalType);
break;
case PG_MONEY:
// money ranges from -92233720368547758.08 to +92233720368547758.07, i.e. at most
// 20 significant digits including the sign; we use precision 30 for safe headroom
DecimalType moneyDecimalType;
moneyDecimalType = new DecimalType(30, 2);
builder.dataType(moneyDecimalType);
builder.columnLength(30L);
builder.scale(2);
break;
case PG_CHAR:
case PG_CHARACTER:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
builder.sourceType(pgDataType);
} else {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
}
break;
case PG_VARCHAR:
case PG_CHARACTER_VARYING:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.sourceType(pgDataType);
} else {
builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
}
break;
case PG_TEXT:
builder.dataType(BasicType.STRING_TYPE);
break;
case PG_UUID:
builder.dataType(BasicType.STRING_TYPE);
builder.sourceType(pgDataType);
builder.columnLength(128L);
break;
case PG_JSON:
case PG_JSONB:
case PG_XML:
case PG_GEOMETRY:
case PG_GEOGRAPHY:
builder.dataType(BasicType.STRING_TYPE);
break;
case PG_CHAR_ARRAY:
case PG_VARCHAR_ARRAY:
case PG_TEXT_ARRAY:
builder.dataType(ArrayType.STRING_ARRAY_TYPE);
break;
case PG_BYTEA:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
break;
case PG_DATE:
builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
break;
case PG_TIME:
case PG_TIME_TZ:
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIME_SCALE) {
builder.scale(MAX_TIME_SCALE);
log.warn(
"The scale of time type is larger than {}, it will be truncated to {}",
MAX_TIME_SCALE,
MAX_TIME_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
case PG_TIMESTAMP:
case PG_TIMESTAMP_TZ:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIMESTAMP_SCALE) {
builder.scale(MAX_TIMESTAMP_SCALE);
log.warn(
"The scale of timestamp type is larger than {}, it will be truncated to {}",
MAX_TIMESTAMP_SCALE,
MAX_TIMESTAMP_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
identifier(), typeDefine.getDataType(), typeDefine.getName());
}
return builder.build();
} | @Test
public void testConvertDecimal() {
BasicTypeDefine<Object> typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("numeric(38,2)")
.dataType("numeric")
.precision(38L)
.scale(2)
.build();
Column column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(new DecimalType(38, 2), column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("numeric")
.dataType("numeric")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(new DecimalType(38, 18), column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
} |
public static String getAppName() {
String appName;
appName = getAppNameByProjectName();
if (appName != null) {
return appName;
}
appName = getAppNameByServerHome();
if (appName != null) {
return appName;
}
return DEFAULT_APP_NAME;
} | @Test
void testGetAppNameByServerTypeForJetty() {
System.setProperty("jetty.home", "/home/admin/testAppName/");
String appName = AppNameUtils.getAppName();
assertEquals("testAppName", appName);
} |
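The helper `getAppNameByServerHome` is not shown above, but the test implies it takes the last path segment of `jetty.home`. A hypothetical sketch of that mapping (an assumption, not the actual helper):

```java
import java.nio.file.Paths;

// Hypothetical: last segment of the server home becomes the app name,
// e.g. "/home/admin/testAppName/" -> "testAppName", as the test expects.
public class AppNameDemo {
    public static void main(String[] args) {
        String jettyHome = "/home/admin/testAppName/";
        System.out.println(Paths.get(jettyHome).getFileName()); // testAppName
    }
}
```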
@Override
public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) {
final Stacker contextStacker = buildContext.buildNodeContext(getId().toString());
return schemaKStreamFactory.create(
buildContext,
dataSource,
contextStacker.push(SOURCE_OP_NAME)
);
} | @Test
public void shouldBuildSourceNode() {
// When:
realStream = buildStream(node);
// Then:
final TopologyDescription.Source node = (TopologyDescription.Source) getNodeByName(realBuilder.build(), PlanTestUtil.SOURCE_NODE);
final List<String> successors = node.successors().stream().map(TopologyDescription.Node::name).collect(Collectors.toList());
assertThat(node.predecessors(), equalTo(Collections.emptySet()));
assertThat(successors, equalTo(Collections.singletonList(PlanTestUtil.TRANSFORM_NODE)));
assertThat(node.topicSet(), equalTo(ImmutableSet.of("topic")));
} |
public static long timeUnitToMill(String timeStrWithUnit) {
// If `timeStrWithUnit` doesn't include a time unit,
// `Duration.parse` will fail and throw a DateTimeParseException.
if (timeStrWithUnit.endsWith("ms")) {
return Long.parseLong(timeStrWithUnit.substring(0, timeStrWithUnit.length() - 2));
}
return Duration.parse("PT" + timeStrWithUnit).toMillis();
} | @Test
void testTimeUnitToMill_WithoutUnit_1() {
assertThrows(DateTimeParseException.class, () -> {
ZeppelinConfiguration.timeUnitToMill("60000");
});
} |
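A minimal standalone sketch of the parsing rule (replicating the method above, not Zeppelin's actual class): a bare `ms` suffix is stripped and parsed directly, while anything else goes through `Duration.parse` with a `PT` prefix, whose unit letters are case-insensitive.

```java
import java.time.Duration;

public class TimeUnitDemo {
    static long timeUnitToMill(String s) {
        if (s.endsWith("ms")) {
            return Long.parseLong(s.substring(0, s.length() - 2));
        }
        return Duration.parse("PT" + s).toMillis(); // throws if no unit, e.g. "60000"
    }

    public static void main(String[] args) {
        System.out.println(timeUnitToMill("100ms")); // 100
        System.out.println(timeUnitToMill("30s"));   // 30000
        System.out.println(timeUnitToMill("5m"));    // 300000
    }
}
```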
public void execute() throws Exception {
forward();
LOG.info("forwarding to master get result max journal id: {}", result.maxJournalId);
ctx.getGlobalStateMgr().getJournalObservable().waitOn(result.maxJournalId, waitTimeoutMs);
if (result.state != null) {
MysqlStateType state = MysqlStateType.fromString(result.state);
if (state != null) {
ctx.getState().setStateType(state);
if (result.isSetErrorMsg()) {
ctx.getState().setMsg(result.getErrorMsg());
}
if (state == MysqlStateType.EOF || state == MysqlStateType.OK) {
afterForward();
}
}
}
if (result.isSetResource_group_name()) {
ctx.getAuditEventBuilder().setResourceGroup(result.getResource_group_name());
}
if (result.isSetAudit_statistics()) {
TAuditStatistics tAuditStatistics = result.getAudit_statistics();
if (ctx.getExecutor() != null) {
ctx.getExecutor().setQueryStatistics(AuditStatisticsUtil.toProtobuf(tAuditStatistics));
}
}
} | @Test
public void testResourceGroupNameInAuditLog() throws Exception {
String createGroup = "create resource group rg1\n" +
"to\n" +
" (db='d1')\n" +
"with (\n" +
" 'cpu_core_limit' = '1',\n" +
" 'mem_limit' = '50%',\n" +
" 'concurrency_limit' = '20',\n" +
" 'type' = 'normal'\n" +
");";
cluster.runSql("d1", createGroup);
String sql = "insert into t1 select * from t1";
StatementBase stmtBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
LeaderOpExecutor executor =
new LeaderOpExecutor(stmtBase, stmtBase.getOrigStmt(), connectContext, RedirectStatus.FORWARD_NO_SYNC);
mockFrontendService(new MockFrontendServiceClient());
executor.execute();
Assert.assertEquals("rg1", connectContext.getAuditEventBuilder().build().resourceGroup);
} |
public void fetch(DownloadAction downloadAction, URLService urlService) throws Exception {
downloadChecksumFile(downloadAction, urlService.baseRemoteURL());
downloadArtifact(downloadAction, urlService.baseRemoteURL());
} | @Test
public void shouldMakeTheFetchHandlerUseTheArtifactMd5Checksum() throws Exception {
ArtifactMd5Checksums artifactMd5Checksums = mock(ArtifactMd5Checksums.class);
when(urlService.baseRemoteURL()).thenReturn("http://10.10.1.1/go/files");
when(checksumFileHandler.url("http://10.10.1.1/go/files", "cruise/10/dev/1/windows")).thenReturn("http://10.10.1.1/go/files/cruise/10/dev/1/windows/cruise-output/md5.checksum");
when(checksumFileHandler.getArtifactMd5Checksums()).thenReturn(artifactMd5Checksums);
FetchHandler fetchHandler = mock(FetchHandler.class);
FetchArtifactBuilder builder = getBuilder(new JobIdentifier("cruise", 10, "1", "dev", "1", "windows", 1L), "log", dest.getPath(), fetchHandler, checksumFileHandler);
builder.fetch(downloadAction, urlService);
verify(fetchHandler).useArtifactMd5Checksums(artifactMd5Checksums);
} |
public ModuleBuilder addRegistries(List<? extends RegistryConfig> registries) {
if (this.registries == null) {
this.registries = new ArrayList<>();
}
this.registries.addAll(registries);
return getThis();
} | @Test
void addRegistries() {
RegistryConfig registry = new RegistryConfig();
ModuleBuilder builder = ModuleBuilder.newBuilder();
builder.addRegistries(Collections.singletonList(registry));
Assertions.assertTrue(builder.build().getRegistries().contains(registry));
Assertions.assertEquals(1, builder.build().getRegistries().size());
} |
Configuration get() {
return this.hadoopConfig;
} | @Test
void customPropertiesSurviveSerializationDeserialization()
throws IOException, ClassNotFoundException {
final SerializableHadoopConfiguration serializableConfigUnderTest =
new SerializableHadoopConfiguration(configuration);
final byte[] serializedConfigUnderTest = serializeAndGetBytes(serializableConfigUnderTest);
final SerializableHadoopConfiguration deserializableConfigUnderTest =
deserializeAndGetConfiguration(serializedConfigUnderTest);
Assertions.<Configuration>assertThat(deserializableConfigUnderTest.get())
.describedAs(
"a Hadoop Configuration with property: key=%s and value=%s",
TEST_KEY, TEST_VALUE)
.satisfies(
actualConfig -> {
Assertions.assertThat(actualConfig)
.isNotSameAs(serializableConfigUnderTest.get());
Assertions.assertThat(actualConfig.get(TEST_KEY))
.isEqualTo(serializableConfigUnderTest.get().get(TEST_KEY));
});
} |
public JobMetaDataParameterObject processJobMultipart(JobMultiPartParameterObject parameterObject)
throws IOException, NoSuchAlgorithmException {
// Change the timestamp in the beginning to avoid expiration
changeLastUpdatedTime();
validateReceivedParameters(parameterObject);
validateReceivedPartNumbersAreExpected(parameterObject);
validatePartChecksum(parameterObject);
// Parts numbers are good. Save them
currentPart = parameterObject.getCurrentPartNumber();
totalPart = parameterObject.getTotalPartNumber();
Path jarPath = jobMetaDataParameterObject.getJarPath();
// Append data to file
try (OutputStream outputStream = Files.newOutputStream(jarPath, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
outputStream.write(parameterObject.getPartData(), 0, parameterObject.getPartSize());
}
if (LOGGER.isInfoEnabled()) {
String message = String.format("Session : %s jarPath: %s PartNumber: %d/%d Total file size : %d bytes",
parameterObject.getSessionId(), jarPath, currentPart, totalPart, Files.size(jarPath));
LOGGER.info(message);
}
JobMetaDataParameterObject result = null;
// If parts are complete
if (currentPart == totalPart) {
validateJarChecksum();
result = jobMetaDataParameterObject;
}
return result;
} | @Test
public void testZeroPartSize() {
byte[] partData = new byte[]{1};
JobMultiPartParameterObject jobMultiPartParameterObject = new JobMultiPartParameterObject();
jobMultiPartParameterObject.setSessionId(null);
jobMultiPartParameterObject.setCurrentPartNumber(1);
jobMultiPartParameterObject.setTotalPartNumber(1);
jobMultiPartParameterObject.setPartData(partData);
jobMultiPartParameterObject.setPartSize(0);
Assert.assertThrows(JetException.class, () -> jobUploadStatus.processJobMultipart(jobMultiPartParameterObject));
} |
public static ThreadFactory createThreadFactory(final String pattern,
final boolean daemon) {
return new ThreadFactory() {
private final AtomicLong threadEpoch = new AtomicLong(0);
@Override
public Thread newThread(Runnable r) {
String threadName;
if (pattern.contains("%d")) {
threadName = String.format(pattern, threadEpoch.addAndGet(1));
} else {
threadName = pattern;
}
Thread thread = new Thread(r, threadName);
thread.setDaemon(daemon);
return thread;
}
};
} | @Test
public void testThreadNameWithNumberNoDaemon() {
ThreadFactory localThreadFactory = ThreadUtils.createThreadFactory(THREAD_NAME_WITH_NUMBER, false);
assertEquals(THREAD_NAME + "1", localThreadFactory.newThread(EMPTY_RUNNABLE).getName());
assertEquals(THREAD_NAME + "2", localThreadFactory.newThread(EMPTY_RUNNABLE).getName());
} |
public StrBuilder reset() {
this.position = 0;
return this;
} | @Test
public void resetTest() {
StrBuilder builder = StrBuilder.create(1);
builder.append("aaa").append("你好").append('r');
builder.insert(3, "数据插入");
builder.reset();
assertEquals("", builder.toString());
} |
@Override
public double rank(Corpus corpus, TextTerms doc, String term, int tf, int n) {
if (tf <= 0) return 0.0;
int N = corpus.ndoc();
int docSize = doc.size();
int avgDocSize = corpus.avgDocSize();
return score(tf, docSize, avgDocSize, N, n);
} | @Test
public void testRank() {
System.out.println("rank");
int freq = 3;
int docSize = 100;
int avgDocSize = 150;
int N = 10000000;
int n = 1000;
BM25 instance = new BM25(2.0, 0.75, 0.0);
double expResult = 18.419681;
double result = instance.score(freq, docSize, avgDocSize, N, n);
assertEquals(expResult, result, 1E-6);
} |
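The expected value 18.419681 is consistent with the usual Okapi BM25 form with a smoothed IDF; the re-derivation below is an inference from the test's constants (k1 = 2.0, b = 0.75, delta = 0), not Smile's actual code:

```java
// idf   = ln(1 + (N - n + 0.5) / (n + 0.5))            ~= 9.2098405
// norm  = tf + k1 * (1 - b + b * docSize / avgDocSize)  = 4.5
// score = idf * tf * (k1 + 1) / norm                   ~= 18.419681
public class Bm25Check {
    public static void main(String[] args) {
        double k1 = 2.0, b = 0.75;
        int tf = 3, docSize = 100, avgDocSize = 150, N = 10_000_000, n = 1000;
        double idf = Math.log(1.0 + (N - n + 0.5) / (n + 0.5));
        double norm = tf + k1 * (1.0 - b + b * (double) docSize / avgDocSize);
        System.out.println(idf * tf * (k1 + 1) / norm); // ~18.419681
    }
}
```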
@Override
public void showUpWebView(WebView webView, boolean isSupportJellyBean) {
} | @Test
public void testShowUpWebView2() {
WebView webView = new WebView(mApplication);
mSensorsAPI.showUpWebView(webView, false);
} |
public static OffchainLookup build(byte[] bytes) {
List<Type> resultList =
FunctionReturnDecoder.decode(Numeric.toHexString(bytes), outputParameters);
return new OffchainLookup(
(Address) resultList.get(0),
(DynamicArray<Utf8String>) resultList.get(1),
(DynamicBytes) resultList.get(2),
(Bytes4) resultList.get(3),
(DynamicBytes) resultList.get(4));
} | @Test
void build() {
OffchainLookup offchainLookup =
OffchainLookup.build(Numeric.hexStringToByteArray(LOOKUP_HEX.substring(10)));
assertNotNull(offchainLookup);
assertEquals("0xc1735677a60884abbcf72295e88d47764beda282", offchainLookup.getSender());
assertNotNull(offchainLookup.getUrls());
assertEquals(1, offchainLookup.getUrls().size());
assertEquals(
"https://offchain-resolver-example.uc.r.appspot.com/{sender}/{data}.json",
offchainLookup.getUrls().get(0));
assertEquals(CALL_DATA, Numeric.toHexString(offchainLookup.getCallData()));
assertEquals(EXTRA_DATA, Numeric.toHexString(offchainLookup.getExtraData()));
assertEquals(CALLBACK_FUNC, Numeric.toHexString(offchainLookup.getCallbackFunction()));
} |
public static Optional<SingleMetaDataValidator> newInstance(final SQLStatement sqlStatement) {
if (sqlStatement instanceof DropSchemaStatement) {
return Optional.of(new SingleDropSchemaMetaDataValidator());
}
if (sqlStatement instanceof DropTableStatement) {
return Optional.of(new SingleDropTableValidator());
}
return Optional.empty();
} | @Test
void assertNewInstanceForDropSchemaStatement() {
assertTrue(SingleMetaDataValidatorFactory.newInstance(mock(DropSchemaStatement.class)).isPresent());
} |
public static <T> PTransform<PCollection<T>, PCollection<T>> exceptDistinct(
PCollection<T> rightCollection) {
checkNotNull(rightCollection, "rightCollection argument is null");
return new SetImpl<>(rightCollection, exceptDistinct());
} | @Test
@Category(NeedsRunner.class)
public void testExceptCollectionList() {
PCollection<String> third = p.apply("third", Create.of(Arrays.asList("a", "b", "b", "g", "g")));
PCollection<Row> thirdRows = p.apply("thirdRows", Create.of(toRows("a", "b", "b", "g", "g")));
PAssert.that(
PCollectionList.of(first)
.and(second)
.and(third)
.apply("stringsCols", Sets.exceptDistinct()))
.containsInAnyOrder("h");
PCollection<Row> results =
PCollectionList.of(firstRows)
.and(secondRows)
.and(thirdRows)
.apply("rowCols", Sets.exceptDistinct());
PAssert.that(results).containsInAnyOrder(toRows("h"));
assertEquals(schema, results.getSchema());
p.run();
} |
@Override
public void register(final TopicPartition partition, final ProcessorStateManager stateManager) {
final StateStoreMetadata storeMetadata = stateManager.storeMetadata(partition);
if (storeMetadata == null) {
throw new IllegalStateException("Cannot find the corresponding state store metadata for changelog " +
partition);
}
final ChangelogMetadata changelogMetadata = new ChangelogMetadata(storeMetadata, stateManager);
// initializing limit offset to 0L for standby changelog to effectively disable any restoration until it is updated
if (stateManager.taskType() == Task.TaskType.STANDBY && stateManager.changelogAsSource(partition)) {
changelogMetadata.restoreEndOffset = 0L;
}
if (changelogs.putIfAbsent(partition, changelogMetadata) != null) {
throw new IllegalStateException("There is already a changelog registered for " + partition +
", this should not happen: " + changelogs);
}
} | @Test
public void shouldNotRegisterStoreWithoutMetadata() {
assertThrows(IllegalStateException.class,
() -> changelogReader.register(new TopicPartition("ChangelogWithoutStoreMetadata", 0), stateManager));
} |
public static LocalDateTime parse(String dateTime, DateTimeFormatter dateTimeFormatter) {
TemporalAccessor parsedTimestamp = dateTimeFormatter.parse(dateTime);
LocalTime localTime = parsedTimestamp.query(TemporalQueries.localTime());
LocalDate localDate = parsedTimestamp.query(TemporalQueries.localDate());
return LocalDateTime.of(localDate, localTime);
} | @Test
public void testParseTimestamp() {
// 2023-12-22 12:55:20
final long timestamp = 1703220920013L;
LocalDateTime parse = DateTimeUtils.parse(timestamp, ZoneId.of("Asia/Shanghai"));
Assertions.assertEquals(55, parse.getMinute());
Assertions.assertEquals(12, parse.getHour());
Assertions.assertEquals(20, parse.getSecond());
Assertions.assertEquals(22, parse.getDayOfMonth());
Assertions.assertEquals(12, parse.getMonth().getValue());
Assertions.assertEquals(2023, parse.getYear());
Assertions.assertEquals(22, parse.getDayOfMonth());
} |
public static void validatePermission(@Nullable String tableName, AccessType accessType,
@Nullable HttpHeaders httpHeaders, String endpointUrl, AccessControl accessControl) {
String userMessage = getUserMessage(tableName, accessType, endpointUrl);
String rawTableName = TableNameBuilder.extractRawTableName(tableName);
try {
if (rawTableName == null) {
if (accessControl.hasAccess(accessType, httpHeaders, endpointUrl)) {
return;
}
} else {
if (accessControl.hasAccess(rawTableName, accessType, httpHeaders, endpointUrl)) {
return;
}
}
} catch (WebApplicationException exception) {
// rethrow WebApplicationException as-is
throw exception;
} catch (Throwable t) {
// catch and log Throwable for NoSuchMethodError which can happen when there are classpath conflicts
// otherwise, grizzly will return a 500 without any logs or indication of what failed
throw new ControllerApplicationException(LOGGER,
"Caught exception while validating permission for " + userMessage, Response.Status.INTERNAL_SERVER_ERROR, t);
}
throw new ControllerApplicationException(LOGGER, "Permission is denied for " + userMessage,
Response.Status.FORBIDDEN);
} | @Test
public void testValidatePermissionAllowed() {
AccessControl ac = Mockito.mock(AccessControl.class);
HttpHeaders mockHttpHeaders = Mockito.mock(HttpHeaders.class);
Mockito.when(ac.hasAccess(_table, AccessType.READ, mockHttpHeaders, _endpoint)).thenReturn(true);
AccessControlUtils.validatePermission(_table, AccessType.READ, mockHttpHeaders, _endpoint, ac);
} |
public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
String createdAt, String expiresAt, String description,
String modificationEffect, Dimension... dimensions) {
return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
} | @Test
void testDouble() {
testGeneric(Flags.defineDoubleFlag("double-id", 3.142, List.of("owner"), "1970-01-01", "2100-01-01", "desc", "mod"), 2.718);
} |
public static String[] parseKey(String groupKey) {
StringBuilder sb = new StringBuilder();
String dataId = null;
String group = null;
String tenant = null;
for (int i = 0; i < groupKey.length(); ++i) {
char c = groupKey.charAt(i);
if (PLUS == c) {
if (null == dataId) {
dataId = sb.toString();
sb.setLength(0);
} else if (null == group) {
group = sb.toString();
sb.setLength(0);
} else {
throw new IllegalArgumentException("invalid groupkey:" + groupKey);
}
} else if (PERCENT == c) {
char next = groupKey.charAt(++i);
char nextnext = groupKey.charAt(++i);
if (TWO == next && B == nextnext) {
sb.append(PLUS);
} else if (TWO == next && FIVE == nextnext) {
sb.append(PERCENT);
} else {
throw new IllegalArgumentException("invalid groupkey:" + groupKey);
}
} else {
sb.append(c);
}
}
if (group == null) {
group = sb.toString();
} else {
tenant = sb.toString();
}
if (StringUtils.isBlank(dataId)) {
throw new IllegalArgumentException("invalid dataId");
}
if (StringUtils.isBlank(group)) {
throw new IllegalArgumentException("invalid group");
}
return new String[] {dataId, group, tenant};
} | @Test
void testParseKeyIllegalArgumentException2() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.parseKey("f%oo");
});
} |
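`parseKey` decodes `%2B` back to `+` and `%25` back to `%`, with bare `+` separating dataId/group/tenant. A hypothetical inverse illustrating that escaping convention (a sketch, not Nacos' actual serializer):

```java
public class GroupKeyDemo {
    // Escape '%' first so the '%' introduced for '+' isn't double-escaped.
    static String escape(String s) {
        return s.replace("%", "%25").replace("+", "%2B");
    }

    static String buildKey(String dataId, String group, String tenant) {
        StringBuilder sb = new StringBuilder(escape(dataId)).append('+').append(escape(group));
        if (tenant != null && !tenant.isEmpty()) {
            sb.append('+').append(escape(tenant));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildKey("da+ta", "gr%oup", "t1")); // da%2Bta+gr%25oup+t1
    }
}
```

This also explains the failing test above: in `"f%oo"` the `%` is followed by `oo`, which is neither `2B` nor `25`, so `parseKey` throws IllegalArgumentException.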
public static void checkProxyPortProperty() throws NumberFormatException {
checkNumericSystemProperty("http.proxyPort", Range.closed(0, 65535));
checkNumericSystemProperty("https.proxyPort", Range.closed(0, 65535));
} | @Test
public void testCheckHttpProxyPortProperty_undefined() throws NumberFormatException {
System.clearProperty("http.proxyPort");
System.clearProperty("https.proxyPort");
JibSystemProperties.checkProxyPortProperty();
} |
public static Map<String, String> getKiePMMLRegressionModelSourcesMap(final RegressionCompilationDTO compilationDTO) throws IOException {
logger.trace("getKiePMMLRegressionModelSourcesMap {} {} {}", compilationDTO.getFields(),
compilationDTO.getModel(),
compilationDTO.getPackageName());
String className = compilationDTO.getSimpleClassName();
CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className,
compilationDTO.getPackageName(),
KIE_PMML_REGRESSION_MODEL_TEMPLATE_JAVA, KIE_PMML_REGRESSION_MODEL_TEMPLATE);
ClassOrInterfaceDeclaration modelTemplate = cloneCU.getClassByName(className)
.orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className));
Map<String, KiePMMLTableSourceCategory> tablesSourceMap = getRegressionTablesMap(compilationDTO);
String nestedTable = tablesSourceMap.size() == 1 ? tablesSourceMap.keySet().iterator().next() :
tablesSourceMap
.keySet()
.stream()
.filter(tableName -> tableName.startsWith(compilationDTO.getPackageName() +
".KiePMMLClassificationTable"))
.findFirst()
.orElseThrow(() -> new KiePMMLException("Failed to find expected " +
"KiePMMLClassificationTable"));
setStaticGetter(compilationDTO,
modelTemplate,
nestedTable);
Map<String, String> toReturn = tablesSourceMap.entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().getSource()));
toReturn.put(getFullClassName(cloneCU), cloneCU.toString());
return toReturn;
} | @Test
void getKiePMMLRegressionModelSourcesMap() throws IOException {
final CommonCompilationDTO<RegressionModel> compilationDTO =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
Map<String, String> retrieved =
KiePMMLRegressionModelFactory.getKiePMMLRegressionModelSourcesMap(RegressionCompilationDTO.fromCompilationDTO(compilationDTO));
assertThat(retrieved).isNotNull();
int expectedSize = regressionTables.size()
+ 2; // One for classification and one for the whole model
assertThat(retrieved).hasSize(expectedSize);
} |
@Override
public List<PinotTaskConfig> generateTasks(List<TableConfig> tableConfigs) {
String taskType = MinionConstants.UpsertCompactionTask.TASK_TYPE;
List<PinotTaskConfig> pinotTaskConfigs = new ArrayList<>();
for (TableConfig tableConfig : tableConfigs) {
if (!validate(tableConfig)) {
LOGGER.warn("Validation failed for table {}. Skipping..", tableConfig.getTableName());
continue;
}
String tableNameWithType = tableConfig.getTableName();
LOGGER.info("Start generating task configs for table: {}", tableNameWithType);
if (tableConfig.getTaskConfig() == null) {
LOGGER.warn("Task config is null for table: {}", tableNameWithType);
continue;
}
Map<String, String> taskConfigs = tableConfig.getTaskConfig().getConfigsForTaskType(taskType);
List<SegmentZKMetadata> allSegments = _clusterInfoAccessor.getSegmentsZKMetadata(tableNameWithType);
// Get completed segments and filter out the segments based on the buffer time configuration
List<SegmentZKMetadata> completedSegments =
getCompletedSegments(taskConfigs, allSegments, System.currentTimeMillis());
if (completedSegments.isEmpty()) {
LOGGER.info("No completed segments were eligible for compaction for table: {}", tableNameWithType);
continue;
}
// Only schedule 1 task of this type, per table
Map<String, TaskState> incompleteTasks =
TaskGeneratorUtils.getIncompleteTasks(taskType, tableNameWithType, _clusterInfoAccessor);
if (!incompleteTasks.isEmpty()) {
LOGGER.warn("Found incomplete tasks: {} for same table: {} and task type: {}. Skipping task generation.",
incompleteTasks.keySet(), tableNameWithType, taskType);
continue;
}
// get server to segment mappings
PinotHelixResourceManager pinotHelixResourceManager = _clusterInfoAccessor.getPinotHelixResourceManager();
Map<String, List<String>> serverToSegments = pinotHelixResourceManager.getServerToSegmentsMap(tableNameWithType);
BiMap<String, String> serverToEndpoints;
try {
serverToEndpoints = pinotHelixResourceManager.getDataInstanceAdminEndpoints(serverToSegments.keySet());
} catch (InvalidConfigException e) {
throw new RuntimeException(e);
}
ServerSegmentMetadataReader serverSegmentMetadataReader =
new ServerSegmentMetadataReader(_clusterInfoAccessor.getExecutor(),
_clusterInfoAccessor.getConnectionManager());
// By default, we use 'snapshot' for validDocIdsType. This means that we will use the validDocIds bitmap from
// the snapshot from Pinot segment. This will require 'enableSnapshot' from UpsertConfig to be set to true.
String validDocIdsTypeStr =
taskConfigs.getOrDefault(UpsertCompactionTask.VALID_DOC_IDS_TYPE, ValidDocIdsType.SNAPSHOT.toString());
ValidDocIdsType validDocIdsType = ValidDocIdsType.valueOf(validDocIdsTypeStr.toUpperCase());
// Number of segments to query per server request. If a table has a lot of segments, a single
// request could carry a huge payload to pinot-server. Batching the requests keeps the payload small.
int numSegmentsBatchPerServerRequest = Integer.parseInt(
taskConfigs.getOrDefault(UpsertCompactionTask.NUM_SEGMENTS_BATCH_PER_SERVER_REQUEST,
String.valueOf(DEFAULT_NUM_SEGMENTS_BATCH_PER_SERVER_REQUEST)));
// Validate that the snapshot is enabled if validDocIdsType is validDocIdsSnapshot
if (validDocIdsType == ValidDocIdsType.SNAPSHOT) {
UpsertConfig upsertConfig = tableConfig.getUpsertConfig();
Preconditions.checkNotNull(upsertConfig, "UpsertConfig must be provided for UpsertCompactionTask");
Preconditions.checkState(upsertConfig.isEnableSnapshot(), String.format(
"'enableSnapshot' from UpsertConfig must be enabled for UpsertCompactionTask with validDocIdsType = %s",
validDocIdsType));
} else if (validDocIdsType == ValidDocIdsType.IN_MEMORY_WITH_DELETE) {
UpsertConfig upsertConfig = tableConfig.getUpsertConfig();
Preconditions.checkNotNull(upsertConfig, "UpsertConfig must be provided for UpsertCompactionTask");
Preconditions.checkNotNull(upsertConfig.getDeleteRecordColumn(),
String.format("deleteRecordColumn must be provided for " + "UpsertCompactionTask with validDocIdsType = %s",
validDocIdsType));
}
Map<String, List<ValidDocIdsMetadataInfo>> validDocIdsMetadataList =
serverSegmentMetadataReader.getSegmentToValidDocIdsMetadataFromServer(tableNameWithType, serverToSegments,
serverToEndpoints, null, 60_000, validDocIdsType.toString(), numSegmentsBatchPerServerRequest);
Map<String, SegmentZKMetadata> completedSegmentsMap =
completedSegments.stream().collect(Collectors.toMap(SegmentZKMetadata::getSegmentName, Function.identity()));
SegmentSelectionResult segmentSelectionResult =
processValidDocIdsMetadata(taskConfigs, completedSegmentsMap, validDocIdsMetadataList);
if (!segmentSelectionResult.getSegmentsForDeletion().isEmpty()) {
pinotHelixResourceManager.deleteSegments(tableNameWithType, segmentSelectionResult.getSegmentsForDeletion(),
"0d");
LOGGER.info(
"Deleted segments containing only invalid records for table: {}, number of segments to be deleted: {}",
tableNameWithType, segmentSelectionResult.getSegmentsForDeletion());
}
int numTasks = 0;
int maxTasks = getMaxTasks(taskType, tableNameWithType, taskConfigs);
for (SegmentZKMetadata segment : segmentSelectionResult.getSegmentsForCompaction()) {
if (numTasks == maxTasks) {
break;
}
if (StringUtils.isBlank(segment.getDownloadUrl())) {
LOGGER.warn("Skipping segment {} for task {} as download url is empty", segment.getSegmentName(), taskType);
continue;
}
Map<String, String> configs = new HashMap<>(getBaseTaskConfigs(tableConfig, List.of(segment.getSegmentName())));
configs.put(MinionConstants.DOWNLOAD_URL_KEY, segment.getDownloadUrl());
configs.put(MinionConstants.UPLOAD_URL_KEY, _clusterInfoAccessor.getVipUrl() + "/segments");
configs.put(MinionConstants.ORIGINAL_SEGMENT_CRC_KEY, String.valueOf(segment.getCrc()));
configs.put(UpsertCompactionTask.VALID_DOC_IDS_TYPE, validDocIdsType.toString());
pinotTaskConfigs.add(new PinotTaskConfig(UpsertCompactionTask.TASK_TYPE, configs));
numTasks++;
}
LOGGER.info("Finished generating {} tasks configs for table: {}", numTasks, tableNameWithType);
}
return pinotTaskConfigs;
} | @Test
public void testGenerateTasksWithNewlyCompletedSegment() {
when(_mockClusterInfoAccessor.getSegmentsZKMetadata(REALTIME_TABLE_NAME)).thenReturn(
Lists.newArrayList(_completedSegment));
when(_mockClusterInfoAccessor.getIdealState(REALTIME_TABLE_NAME)).thenReturn(
getIdealState(REALTIME_TABLE_NAME, Lists.newArrayList(_completedSegment.getSegmentName())));
_taskGenerator.init(_mockClusterInfoAccessor);
List<PinotTaskConfig> pinotTaskConfigs = _taskGenerator.generateTasks(Lists.newArrayList(_tableConfig));
assertEquals(pinotTaskConfigs.size(), 0);
} |
@Override
public void setProperties(final Properties properties) {
} | @Test
public void setPropertiesTest() {
final PostgreSQLPrepareInterceptor postgreSQLPrepareInterceptor = new PostgreSQLPrepareInterceptor();
Assertions.assertDoesNotThrow(() -> postgreSQLPrepareInterceptor.setProperties(mock(Properties.class)));
} |
public String getName() {
if ( transMeta == null ) {
return null;
}
return transMeta.getName();
} | @Test
public void testFindDatabaseWithEncodedConnectionName() {
DatabaseMeta dbMeta1 =
new DatabaseMeta( "encoded_DBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
dbMeta1.setDisplayName( "encoded.DBConnection" );
meta.addDatabase( dbMeta1 );
DatabaseMeta dbMeta2 =
new DatabaseMeta( "normalDBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
dbMeta2.setDisplayName( "normalDBConnection" );
meta.addDatabase( dbMeta2 );
DatabaseMeta databaseMeta = meta.findDatabase( dbMeta1.getDisplayName() );
assertNotNull( databaseMeta );
assertEquals( "encoded_DBConnection", databaseMeta.getName() );
assertEquals( "encoded.DBConnection", databaseMeta.getDisplayName() );
} |
public String selectProtocol() {
if (members.isEmpty()) {
throw new IllegalStateException("Cannot select protocol for empty group");
}
// select the protocol for this group which is supported by all members
Set<String> candidates = candidateProtocols();
// let each member vote for one of the protocols
Map<String, Integer> votesByProtocol = new HashMap<>();
allMembers().stream().map(member -> member.vote(candidates))
.forEach(protocolName -> {
int count = votesByProtocol.getOrDefault(protocolName, 0);
votesByProtocol.put(protocolName, count + 1);
});
// choose the one with the most votes
return votesByProtocol.entrySet().stream()
.max(Comparator.comparingInt(Map.Entry::getValue))
.map(Map.Entry::getKey).orElse(null);
} | @Test
public void testSelectProtocolRaisesIfNoMembers() {
assertThrows(IllegalStateException.class, () -> group.selectProtocol());
} |
@Override
public void flush(final ByteBuffer buffer, final Consumer<Map<String, Object>> eventConsumer) {
decodeMetricIn.increment();
decodeMetricTime.time(() -> codec.flush(buffer, (event) -> {
decodeMetricOut.increment();
eventConsumer.accept(event);
}));
} | @Test
public void flushIncrementsEventCount() {
codec = new AbstractCodec() {
@Override
public void flush(final ByteBuffer buffer, final Consumer<Map<String, Object>> eventConsumer) {
eventConsumer.accept(ImmutableMap.of("message", "abcdef"));
eventConsumer.accept(ImmutableMap.of("message", "1234567"));
}
};
final JavaCodecDelegator codecDelegator = constructCodecDelegator();
codecDelegator.flush(ByteBuffer.wrap(new byte[] {1, 2, 3}), (e) -> {});
assertEquals(1, getMetricLongValue("decode", "writes_in"));
assertEquals(2, getMetricLongValue("decode", "out"));
} |
Object getCellValue(Cell cell, Schema.FieldType type) {
ByteString cellValue = cell.getValue();
int valueSize = cellValue.size();
switch (type.getTypeName()) {
case BOOLEAN:
checkArgument(valueSize == 1, message("Boolean", 1));
return cellValue.toByteArray()[0] != 0;
case BYTE:
checkArgument(valueSize == 1, message("Byte", 1));
return cellValue.toByteArray()[0];
case INT16:
checkArgument(valueSize == 2, message("Int16", 2));
return Shorts.fromByteArray(cellValue.toByteArray());
case INT32:
checkArgument(valueSize == 4, message("Int32", 4));
return Ints.fromByteArray(cellValue.toByteArray());
case INT64:
checkArgument(valueSize == 8, message("Int64", 8));
return Longs.fromByteArray(cellValue.toByteArray());
case FLOAT:
checkArgument(valueSize == 4, message("Float", 4));
return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray()));
case DOUBLE:
checkArgument(valueSize == 8, message("Double", 8));
return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray()));
case DATETIME:
return DateTime.parse(cellValue.toStringUtf8());
case STRING:
return cellValue.toStringUtf8();
case BYTES:
return cellValue.toByteArray();
case LOGICAL_TYPE:
String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
throw new IllegalStateException("Unsupported logical type: " + identifier);
default:
throw new IllegalArgumentException(
String.format("Unsupported cell value type '%s'.", type.getTypeName()));
}
} | @Test
public void shouldParseInt32Type() {
byte[] value = new byte[] {0, 2, 0, 0};
assertEquals(131072, PARSER.getCellValue(cell(value), INT32));
} |
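Guava's `Ints.fromByteArray` reads big-endian, which is why `{0, 2, 0, 0}` decodes to `0x00020000`; a default (big-endian) `ByteBuffer` shows the same result:

```java
import java.nio.ByteBuffer;

public class Int32Demo {
    public static void main(String[] args) {
        // 0x00020000 == 2 * 65536 == 131072
        System.out.println(ByteBuffer.wrap(new byte[]{0, 2, 0, 0}).getInt()); // 131072
    }
}
```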
@Override
public Map<V, String> hash(V... members) {
return get(hashAsync(members));
} | @Test
public void testHash() {
RGeo<String> geo = redisson.getGeo("test");
geo.add(new GeoEntry(13.361389, 38.115556, "Palermo"), new GeoEntry(15.087269, 37.502669, "Catania"));
Map<String, String> expected = new LinkedHashMap<>();
expected.put("Palermo", "sqc8b49rny0");
expected.put("Catania", "sqdtr74hyu0");
assertThat(geo.hash("Palermo", "Catania")).isEqualTo(expected);
} |
public void report(String message) {
if (monochrome) {
message = message.replaceAll("\u001B\\[[;\\d]*m", "");
}
out.print(message);
} | @Test
void printsTheCorrespondingReportsCucumberIoUrl() throws UnsupportedEncodingException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
UrlReporter urlReporter = new UrlReporter(new PrintStream(bytes, false, StandardCharsets.UTF_8.name()));
urlReporter.report(message);
assertThat(bytes, bytes(equalTo(message)));
} |
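A quick demonstration of the ANSI-escape-stripping regex used by `report()` in monochrome mode:

```java
public class AnsiStripDemo {
    public static void main(String[] args) {
        // "\u001B[32m" colors the text green; "\u001B[0m" resets it.
        String colored = "\u001B[32mpassed\u001B[0m scenario";
        System.out.println(colored.replaceAll("\u001B\\[[;\\d]*m", "")); // passed scenario
    }
}
```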
public void decode(ByteBuf buffer) {
boolean last;
int statusCode;
while (true) {
switch(state) {
case READ_COMMON_HEADER:
if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
return;
}
int frameOffset = buffer.readerIndex();
int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
buffer.skipBytes(SPDY_HEADER_SIZE);
boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
int version;
int type;
if (control) {
// Decode control frame common header
version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
streamId = 0; // Default to session Stream-ID
} else {
// Decode data frame common header
version = spdyVersion; // Default to expected version
type = SPDY_DATA_FRAME;
streamId = getUnsignedInt(buffer, frameOffset);
}
flags = buffer.getByte(flagsOffset);
length = getUnsignedMedium(buffer, lengthOffset);
// Check version first then validity
if (version != spdyVersion) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SPDY Version");
} else if (!isValidFrameHeader(streamId, type, flags, length)) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid Frame Error");
} else {
state = getNextState(type, length);
}
break;
case READ_DATA_FRAME:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
break;
}
// Generate data frames that do not exceed maxChunkSize
int dataLength = Math.min(maxChunkSize, length);
// Wait until entire frame is readable
if (buffer.readableBytes() < dataLength) {
return;
}
ByteBuf data = buffer.alloc().buffer(dataLength);
data.writeBytes(buffer, dataLength);
length -= dataLength;
if (length == 0) {
state = State.READ_COMMON_HEADER;
}
last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
delegate.readDataFrame(streamId, last, data);
break;
case READ_SYN_STREAM_FRAME:
if (buffer.readableBytes() < 10) {
return;
}
int offset = buffer.readerIndex();
streamId = getUnsignedInt(buffer, offset);
int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
last = hasFlag(flags, SPDY_FLAG_FIN);
boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
buffer.skipBytes(10);
length -= 10;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_STREAM Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
}
break;
case READ_SYN_REPLY_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_REPLY Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynReplyFrame(streamId, last);
}
break;
case READ_RST_STREAM_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (streamId == 0 || statusCode == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid RST_STREAM Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readRstStreamFrame(streamId, statusCode);
}
break;
case READ_SETTINGS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
numSettings = getUnsignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
length -= 4;
// Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
if ((length & 0x07) != 0 || length >> 3 != numSettings) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SETTINGS Frame");
} else {
state = State.READ_SETTING;
delegate.readSettingsFrame(clear);
}
break;
case READ_SETTING:
if (numSettings == 0) {
state = State.READ_COMMON_HEADER;
delegate.readSettingsEnd();
break;
}
if (buffer.readableBytes() < 8) {
return;
}
byte settingsFlags = buffer.getByte(buffer.readerIndex());
int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
int value = getSignedInt(buffer, buffer.readerIndex() + 4);
boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
buffer.skipBytes(8);
--numSettings;
delegate.readSetting(id, value, persistValue, persisted);
break;
case READ_PING_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
int pingId = getSignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
state = State.READ_COMMON_HEADER;
delegate.readPingFrame(pingId);
break;
case READ_GOAWAY_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
state = State.READ_COMMON_HEADER;
delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
break;
case READ_HEADERS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid HEADERS Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readHeadersFrame(streamId, last);
}
break;
case READ_WINDOW_UPDATE_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (deltaWindowSize == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
}
break;
case READ_HEADER_BLOCK:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readHeaderBlockEnd();
break;
}
if (!buffer.isReadable()) {
return;
}
int compressedBytes = Math.min(buffer.readableBytes(), length);
ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
headerBlock.writeBytes(buffer, compressedBytes);
length -= compressedBytes;
delegate.readHeaderBlock(headerBlock);
break;
case DISCARD_FRAME:
int numBytes = Math.min(buffer.readableBytes(), length);
buffer.skipBytes(numBytes);
length -= numBytes;
if (length == 0) {
state = State.READ_COMMON_HEADER;
break;
}
return;
case FRAME_ERROR:
buffer.skipBytes(buffer.readableBytes());
return;
default:
throw new Error("Shouldn't reach here.");
}
}
} | @Test
public void testInvalidSpdyRstStreamFrameLength() throws Exception {
short type = 3;
byte flags = 0;
int length = 12; // invalid length
int streamId = RANDOM.nextInt() & 0x7FFFFFFF | 0x01;
int statusCode = RANDOM.nextInt() | 0x01;
ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
encodeControlFrameHeader(buf, type, flags, length);
buf.writeInt(streamId);
buf.writeInt(statusCode);
decoder.decode(buf);
verify(delegate).readFrameError(anyString());
assertFalse(buf.isReadable());
buf.release();
} |
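A minimal standalone sketch (class and method names hypothetical) of the SETTINGS length validation in the decoder above: each ID/Value entry is 8 bytes, so the payload length must be a multiple of 8 whose entry count matches the advertised numSettings.

public class SettingsLengthCheckDemo {
    // Mirrors `(length & 0x07) != 0 || length >> 3 != numSettings` from the decoder.
    static boolean isValidSettingsPayload(int length, int numSettings) {
        return (length & 0x07) == 0 && (length >> 3) == numSettings;
    }

    public static void main(String[] args) {
        System.out.println(isValidSettingsPayload(16, 2)); // true: two 8-byte entries
        System.out.println(isValidSettingsPayload(12, 2)); // false: not a multiple of 8
        System.out.println(isValidSettingsPayload(16, 3)); // false: entry count mismatch
    }
}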
static void dissectFrame(
final DriverEventCode eventCode,
final MutableDirectBuffer buffer,
final int offset,
final StringBuilder builder)
{
int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
builder.append(": address=");
encodedLength += dissectSocketAddress(buffer, offset + encodedLength, builder);
builder.append(" ");
final int frameOffset = offset + encodedLength;
final int frameType = frameType(buffer, frameOffset);
switch (frameType)
{
case HeaderFlyweight.HDR_TYPE_PAD:
case HeaderFlyweight.HDR_TYPE_DATA:
DATA_HEADER.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectDataFrame(builder);
break;
case HeaderFlyweight.HDR_TYPE_SM:
SM_HEADER.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectStatusFrame(builder);
break;
case HeaderFlyweight.HDR_TYPE_NAK:
NAK_HEADER.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectNakFrame(builder);
break;
case HeaderFlyweight.HDR_TYPE_SETUP:
SETUP_HEADER.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectSetupFrame(builder);
break;
case HeaderFlyweight.HDR_TYPE_RTTM:
RTT_MEASUREMENT.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectRttFrame(builder);
break;
case HeaderFlyweight.HDR_TYPE_RES:
dissectResFrame(buffer, frameOffset, builder);
break;
case HeaderFlyweight.HDR_TYPE_RSP_SETUP:
RSP_SETUP.wrap(buffer, frameOffset, buffer.capacity() - frameOffset);
dissectRspSetupFrame(builder);
break;
default:
builder.append("type=UNKNOWN(").append(frameType).append(")");
break;
}
} | @Test
void dissectFrameTypeRtt()
{
internalEncodeLogHeader(buffer, 0, 3, 3, () -> 3_000_000_000L);
final int socketAddressOffset = encodeSocketAddress(
buffer, LOG_HEADER_LENGTH, new InetSocketAddress("localhost", 8888));
final RttMeasurementFlyweight flyweight = new RttMeasurementFlyweight();
flyweight.wrap(buffer, LOG_HEADER_LENGTH + socketAddressOffset, 300);
flyweight.headerType(HDR_TYPE_RTTM);
flyweight.flags((short)20);
flyweight.frameLength(100);
flyweight.sessionId(0);
flyweight.streamId(1);
flyweight.echoTimestampNs(123456789);
flyweight.receptionDelta(354);
flyweight.receiverId(22);
dissectFrame(FRAME_OUT, buffer, 0, builder);
assertEquals("[3.000000000] " + CONTEXT + ": " + FRAME_OUT.name() + " [3/3]: " +
"address=127.0.0.1:8888 type=RTT flags=00010100 frameLength=100 sessionId=0 streamId=1 " +
"echoTimestampNs=123456789 receptionDelta=354 receiverId=22",
builder.toString());
} |
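The flags=00010100 fragment in the expected string is simply the zero-padded 8-bit binary rendering of the short value 20 set on the flyweight; a quick sanity check in plain Java:

public class FlagsBinaryDemo {
    public static void main(String[] args) {
        short flags = 20;
        // Left-pad the binary string to 8 bits, as the dissector output shows.
        System.out.println(String.format("%8s", Integer.toBinaryString(flags)).replace(' ', '0')); // 00010100
    }
}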
public static MessageType convertToParquetMessageType(String name, RowType rowType) {
Type[] types = new Type[rowType.getFieldCount()];
for (int i = 0; i < rowType.getFieldCount(); i++) {
String fieldName = rowType.getFieldNames().get(i);
LogicalType fieldType = rowType.getTypeAt(i);
            types[i] = convertToParquetType(
                    fieldName,
                    fieldType,
                    fieldType.isNullable() ? Type.Repetition.OPTIONAL : Type.Repetition.REQUIRED);
}
return new MessageType(name, types);
} | @Test
void testConvertComplexTypes() {
DataType dataType = DataTypes.ROW(
DataTypes.FIELD("f_array",
DataTypes.ARRAY(DataTypes.CHAR(10))),
DataTypes.FIELD("f_map",
DataTypes.MAP(DataTypes.INT(), DataTypes.VARCHAR(20))),
DataTypes.FIELD("f_row",
DataTypes.ROW(
DataTypes.FIELD("f_row_f0", DataTypes.INT()),
DataTypes.FIELD("f_row_f1", DataTypes.VARCHAR(10)),
DataTypes.FIELD("f_row_f2",
DataTypes.ROW(
DataTypes.FIELD("f_row_f2_f0", DataTypes.INT()),
DataTypes.FIELD("f_row_f2_f1", DataTypes.VARCHAR(10)))))));
org.apache.parquet.schema.MessageType messageType =
ParquetSchemaConverter.convertToParquetMessageType("converted", (RowType) dataType.getLogicalType());
assertThat(messageType.getColumns().size(), is(7));
final String expected = "message converted {\n"
+ " optional group f_array (LIST) {\n"
+ " repeated group list {\n"
+ " optional binary element (STRING);\n"
+ " }\n"
+ " }\n"
+ " optional group f_map (MAP) {\n"
+ " repeated group key_value {\n"
+ " required int32 key;\n"
+ " optional binary value (STRING);\n"
+ " }\n"
+ " }\n"
+ " optional group f_row {\n"
+ " optional int32 f_row_f0;\n"
+ " optional binary f_row_f1 (STRING);\n"
+ " optional group f_row_f2 {\n"
+ " optional int32 f_row_f2_f0;\n"
+ " optional binary f_row_f2_f1 (STRING);\n"
+ " }\n"
+ " }\n"
+ "}\n";
assertThat(messageType.toString(), is(expected));
} |
synchronized void add(int splitCount) {
int pos = count % history.length;
history[pos] = splitCount;
count += 1;
} | @Test
public void testThreeMoreThanFullHistory() {
EnumerationHistory history = new EnumerationHistory(3);
history.add(1);
history.add(2);
history.add(3);
history.add(4);
history.add(5);
history.add(6);
int[] expectedHistorySnapshot = {4, 5, 6};
testHistory(history, expectedHistorySnapshot);
} |
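A small sketch of the circular-buffer indexing that makes the test above pass: writing at count % history.length wraps around, so only the most recent capacity-many values survive.

public class RingHistoryDemo {
    public static void main(String[] args) {
        int[] history = new int[3];
        int count = 0;
        for (int split : new int[] {1, 2, 3, 4, 5, 6}) {
            history[count % history.length] = split; // wraps back to index 0 after index 2
            count++;
        }
        // 4 overwrote 1, 5 overwrote 2, 6 overwrote 3.
        System.out.println(java.util.Arrays.toString(history)); // [4, 5, 6]
    }
}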
@Override
public List<Bar> aggregate(List<Bar> bars) {
final List<Bar> aggregated = new ArrayList<>();
if (bars.isEmpty()) {
return aggregated;
}
final Bar firstBar = bars.get(0);
// get the actual time period
final Duration actualDur = firstBar.getTimePeriod();
// check if new timePeriod is a multiplication of actual time period
final boolean isMultiplication = timePeriod.getSeconds() % actualDur.getSeconds() == 0;
if (!isMultiplication) {
throw new IllegalArgumentException(
"Cannot aggregate bars: the new timePeriod must be a multiplication of the actual timePeriod.");
}
int i = 0;
final Num zero = firstBar.getOpenPrice().zero();
while (i < bars.size()) {
Bar bar = bars.get(i);
final ZonedDateTime beginTime = bar.getBeginTime();
final Num open = bar.getOpenPrice();
Num high = bar.getHighPrice();
Num low = bar.getLowPrice();
Num close = null;
Num volume = zero;
Num amount = zero;
long trades = 0;
Duration sumDur = Duration.ZERO;
while (isInDuration(sumDur)) {
if (i < bars.size()) {
if (!beginTimesInDuration(beginTime, bars.get(i).getBeginTime())) {
break;
}
bar = bars.get(i);
if (high == null || bar.getHighPrice().isGreaterThan(high)) {
high = bar.getHighPrice();
}
if (low == null || bar.getLowPrice().isLessThan(low)) {
low = bar.getLowPrice();
}
close = bar.getClosePrice();
if (bar.getVolume() != null) {
volume = volume.plus(bar.getVolume());
}
if (bar.getAmount() != null) {
amount = amount.plus(bar.getAmount());
}
if (bar.getTrades() != 0) {
trades = trades + bar.getTrades();
}
}
sumDur = sumDur.plus(actualDur);
i++;
}
if (!onlyFinalBars || i <= bars.size()) {
final Bar aggregatedBar = new BaseBar(timePeriod, beginTime.plus(timePeriod), open, high, low, close,
volume, amount, trades);
aggregated.add(aggregatedBar);
}
}
return aggregated;
} | @Test
public void upscaledTo5DayBars() {
final DurationBarAggregator barAggregator = new DurationBarAggregator(Duration.ofDays(5), true);
final List<Bar> bars = barAggregator.aggregate(getOneDayBars());
// must be 3 bars
assertEquals(3, bars.size());
        // bar 1 must have ohlcv (1, 6, 4, 9, 33)
final Bar bar1 = bars.get(0);
final Num num1 = bar1.getOpenPrice();
assertNumEquals(num1.numOf(1), bar1.getOpenPrice());
assertNumEquals(num1.numOf(6), bar1.getHighPrice());
assertNumEquals(num1.numOf(4), bar1.getLowPrice());
assertNumEquals(num1.numOf(9), bar1.getClosePrice());
assertNumEquals(num1.numOf(33), bar1.getVolume());
// bar 2 must have ohlcv (6, 91, 4, 10, 260)
final Bar bar2 = bars.get(1);
final Num num2 = bar2.getOpenPrice();
assertNumEquals(num2.numOf(6), bar2.getOpenPrice());
assertNumEquals(num2.numOf(91), bar2.getHighPrice());
assertNumEquals(num2.numOf(4), bar2.getLowPrice());
assertNumEquals(num2.numOf(10), bar2.getClosePrice());
assertNumEquals(num2.numOf(260), bar2.getVolume());
        // bar 3 must have ohlcv (4, 991, 43, 10, 1010)
Bar bar3 = bars.get(2);
Num num3 = bar3.getOpenPrice();
assertNumEquals(num3.numOf(4), bar3.getOpenPrice());
assertNumEquals(num3.numOf(991), bar3.getHighPrice());
assertNumEquals(num3.numOf(43), bar3.getLowPrice());
assertNumEquals(num3.numOf(10), bar3.getClosePrice());
assertNumEquals(num3.numOf(1010), bar3.getVolume());
} |
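A quick illustration (plain java.time, hypothetical standalone class) of the multiplication precondition in aggregate: the target period must divide evenly by the source bar period, otherwise the method throws.

import java.time.Duration;

public class PeriodMultipleDemo {
    public static void main(String[] args) {
        Duration actual = Duration.ofDays(1); // period of the source bars
        System.out.println(Duration.ofDays(5).getSeconds() % actual.getSeconds() == 0);   // true: aggregation allowed
        System.out.println(Duration.ofHours(36).getSeconds() % actual.getSeconds() == 0); // false: would throw IllegalArgumentException
    }
}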
@Injection( name = "TRUNCATE_TABLE" )
public void metaSetTruncateTable( String value ) {
setTruncateTable( "Y".equalsIgnoreCase( value ) );
} | @Test
public void metaSetTruncateTable() {
TableOutputMeta tableOutputMeta = new TableOutputMeta();
    tableOutputMeta.metaSetTruncateTable( "Y" );
    assertTrue( tableOutputMeta.truncateTable() );
    tableOutputMeta.metaSetTruncateTable( "N" );
    assertFalse( tableOutputMeta.truncateTable() );
    tableOutputMeta.metaSetTruncateTable( "Ynot" );
    assertFalse( tableOutputMeta.truncateTable() );
} |
@Override
public ExecuteResult execute(final ServiceContext serviceContext, final ConfiguredKsqlPlan plan,
final boolean restoreInProgress) {
try {
final ExecuteResult result = EngineExecutor
.create(primaryContext, serviceContext, plan.getConfig())
.execute(plan.getPlan(), restoreInProgress);
return result;
} catch (final KsqlStatementException e) {
throw e;
} catch (final KsqlException e) {
// add the statement text to the KsqlException
throw new KsqlStatementException(
e.getMessage(),
e.getMessage(),
plan.getPlan().getStatementText(),
e.getCause()
);
}
} | @Test
public void shouldFailDropStreamWhenAnotherStreamIsReadingTheTable() {
// Given:
setupKsqlEngineWithSharedRuntimeEnabled();
KsqlEngineTestUtil.execute(
serviceContext,
ksqlEngine,
"create stream bar as select * from test1;"
+ "create stream foo as select * from bar;",
ksqlConfig,
Collections.emptyMap()
);
// When:
final KsqlStatementException e = assertThrows(
KsqlStatementException.class,
() -> KsqlEngineTestUtil.execute(
serviceContext,
ksqlEngine,
"drop stream bar;",
ksqlConfig,
Collections.emptyMap()
)
);
// Then:
assertThat(e, rawMessage(is(
"Cannot drop BAR.\n"
+ "The following streams and/or tables read from this source: [FOO].\n"
+ "You need to drop them before dropping BAR.")));
assertThat(e, statementText(is("drop stream bar;")));
} |
@PUT
@Path("{noteId}/rename")
@ZeppelinApi
public Response renameNote(@PathParam("noteId") String noteId,
String message) throws IOException {
LOGGER.info("Rename note by JSON {}", message);
RenameNoteRequest request = GSON.fromJson(message, RenameNoteRequest.class);
String newName = request.getName();
if (newName.isEmpty()) {
LOGGER.warn("Trying to rename notebook {} with empty name parameter", noteId);
throw new BadRequestException("name can not be empty");
}
notebookService.renameNote(noteId, request.getName(), false, getServiceContext(),
new RestServiceCallback<Note>() {
@Override
public void onSuccess(Note note, ServiceContext context) throws IOException {
notebookServer.broadcastNote(note);
notebookServer.broadcastNoteList(context.getAutheInfo(), context.getUserAndRoles());
}
});
return new JsonResponse<>(Status.OK, "").build();
} | @Test
void testRenameNote() throws IOException {
LOG.info("Running testRenameNote");
String noteId = null;
try {
String oldName = "old_name";
noteId = notebook.createNote(oldName, anonymous);
assertEquals(oldName, notebook.processNote(noteId, Note::getName));
final String newName = "testName";
      String jsonRequest = "{\"name\": \"" + newName + "\"}";
CloseableHttpResponse put = httpPut("/notebook/" + noteId + "/rename/", jsonRequest);
assertThat("test testRenameNote:", put, isAllowed());
put.close();
assertEquals(newName, notebook.processNote(noteId, Note::getName));
} finally {
// cleanup
if (null != noteId) {
notebook.removeNote(noteId, anonymous);
}
}
} |
@Override
public Object toKsqlRow(final Schema connectSchema, final Object connectData) {
if (connectData == null) {
return null;
}
return toKsqlValue(schema, connectSchema, connectData, "");
} | @Test
public void shouldTranslateMapWithStructValues() {
// Given:
final Schema innerSchema = SchemaBuilder
.struct()
.field("FIELD", Schema.OPTIONAL_INT32_SCHEMA)
.build();
final Schema rowSchema = SchemaBuilder
.struct()
.field("MAP", SchemaBuilder
.map(Schema.STRING_SCHEMA, innerSchema)
.optional()
.build()
).build();
final Struct connectStruct = new Struct(rowSchema);
final Struct inner1 = new Struct(innerSchema);
inner1.put("FIELD", 123);
final Struct inner2 = new Struct(innerSchema);
inner2.put("FIELD", 456);
connectStruct.put("MAP", ImmutableMap.of("k1", inner1, "k2", inner2));
final ConnectDataTranslator connectToKsqlTranslator = new ConnectDataTranslator(rowSchema);
// When:
final Struct row = (Struct) connectToKsqlTranslator.toKsqlRow(rowSchema, connectStruct);
assertThat(row.get("MAP"), instanceOf(Map.class));
final Map<String, Struct> map = (Map<String, Struct>)row.get("MAP");
assertThat(map.get("k1").get("FIELD"), equalTo(123));
assertThat(map.get("k2").get("FIELD"), equalTo(456));
} |
public static <T> IntermediateCompatibilityResult<T> constructIntermediateCompatibilityResult(
TypeSerializerSnapshot<?>[] newNestedSerializerSnapshots,
TypeSerializerSnapshot<?>[] oldNestedSerializerSnapshots) {
Preconditions.checkArgument(
newNestedSerializerSnapshots.length == oldNestedSerializerSnapshots.length,
"Different number of new serializer snapshots and existing serializer snapshots.");
TypeSerializer<?>[] nestedSerializers =
new TypeSerializer[newNestedSerializerSnapshots.length];
// check nested serializers for compatibility
boolean nestedSerializerRequiresMigration = false;
boolean hasReconfiguredNestedSerializers = false;
for (int i = 0; i < oldNestedSerializerSnapshots.length; i++) {
TypeSerializerSchemaCompatibility<?> compatibility =
resolveCompatibility(
newNestedSerializerSnapshots[i], oldNestedSerializerSnapshots[i]);
// if any one of the new nested serializers is incompatible, we can just short circuit
// the result
if (compatibility.isIncompatible()) {
return IntermediateCompatibilityResult.definedIncompatibleResult();
}
if (compatibility.isCompatibleAfterMigration()) {
nestedSerializerRequiresMigration = true;
} else if (compatibility.isCompatibleWithReconfiguredSerializer()) {
hasReconfiguredNestedSerializers = true;
nestedSerializers[i] = compatibility.getReconfiguredSerializer();
} else if (compatibility.isCompatibleAsIs()) {
nestedSerializers[i] = newNestedSerializerSnapshots[i].restoreSerializer();
} else {
throw new IllegalStateException("Undefined compatibility type.");
}
}
if (nestedSerializerRequiresMigration) {
return IntermediateCompatibilityResult.definedCompatibleAfterMigrationResult();
}
if (hasReconfiguredNestedSerializers) {
return IntermediateCompatibilityResult.undefinedReconfigureResult(nestedSerializers);
}
// ends up here if everything is compatible as is
return IntermediateCompatibilityResult.definedCompatibleAsIsResult(nestedSerializers);
} | @Test
void testCompatibleAsIsIntermediateCompatibilityResult() {
final TypeSerializerSnapshot<?>[] previousSerializerSnapshots =
new TypeSerializerSnapshot<?>[] {
new SchemaCompatibilityTestingSerializer("first serializer")
.snapshotConfiguration(),
new SchemaCompatibilityTestingSerializer("second serializer")
.snapshotConfiguration(),
};
final TypeSerializerSnapshot<?>[] newSerializerSnapshots =
new TypeSerializerSnapshot<?>[] {
SchemaCompatibilityTestingSnapshot.thatIsCompatibleWithLastSerializer(
"first serializer"),
SchemaCompatibilityTestingSnapshot.thatIsCompatibleWithLastSerializer(
"second serializer"),
};
IntermediateCompatibilityResult<?> intermediateCompatibilityResult =
CompositeTypeSerializerUtil.constructIntermediateCompatibilityResult(
newSerializerSnapshots, previousSerializerSnapshots);
assertThat(intermediateCompatibilityResult.isCompatibleAsIs()).isTrue();
assertThat(intermediateCompatibilityResult.getFinalResult().isCompatibleAsIs()).isTrue();
assertThat(intermediateCompatibilityResult.getNestedSerializers())
.containsExactly(
Arrays.stream(newSerializerSnapshots)
.map(TypeSerializerSnapshot::restoreSerializer)
.toArray(TypeSerializer[]::new));
} |
public List<ScimGroupDto> findScimGroups(DbSession dbSession, ScimGroupQuery query, Pagineable pagination) {
return mapper(dbSession).findScimGroups(query, pagination);
} | @Test
void findScimGroups_whenFilteringByDisplayName_shouldReturnTheExpectedScimGroups() {
insertGroupAndScimGroup("group1");
insertGroupAndScimGroup("group2");
ScimGroupQuery query = ScimGroupQuery.fromScimFilter(DISPLAY_NAME_FILTER);
List<ScimGroupDto> scimGroups = scimGroupDao.findScimGroups(db.getSession(), query, Pagination.all());
assertThat(scimGroups).hasSize(1);
assertThat(scimGroups.get(0).getScimGroupUuid()).isEqualTo(createScimGroupUuid("group2"));
} |
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return true;
}
try {
return new DropboxAttributesFinderFeature(session).find(file, listener) != PathAttributes.EMPTY;
}
catch(NotfoundException e) {
return false;
}
} | @Test
public void testFindNotFound() throws Exception {
assertFalse(new DropboxFindFeature(session).find(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file))));
} |
@Override
public int getIntLE(int index) {
checkIndex(index, 4);
return _getIntLE(index);
} | @Test
public void testGetIntLEAfterRelease() {
assertThrows(IllegalReferenceCountException.class, new Executable() {
@Override
public void execute() {
releasedBuffer().getIntLE(0);
}
});
} |
@Override
public void writeDouble(final double v) throws IOException {
ensureAvailable(DOUBLE_SIZE_IN_BYTES);
MEM.putDouble(buffer, ARRAY_BYTE_BASE_OFFSET + pos, v);
pos += DOUBLE_SIZE_IN_BYTES;
} | @Test
public void testWriteDoubleV() throws Exception {
double expected = 1.1d;
out.writeDouble(expected);
long theLong = Bits.readLong(out.buffer, 0, ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN);
double actual = Double.longBitsToDouble(theLong);
assertEquals(expected, actual, 0);
} |
@Override
public int size() {
if (entries == null) {
return 0;
}
return entries.size();
} | @Test
public void testSize_whenEmpty() {
List<Map.Entry> emptyList = Collections.emptyList();
ResultSet resultSet = new ResultSet(emptyList, IterationType.KEY);
assertEquals(0, resultSet.size());
} |
@Subscribe
public void onChatMessage(ChatMessage event)
{
if (event.getType() != ChatMessageType.SPAM
&& event.getType() != ChatMessageType.GAMEMESSAGE
&& event.getType() != ChatMessageType.MESBOX)
{
return;
}
final var msg = event.getMessage();
if (WOOD_CUT_PATTERN.matcher(msg).matches())
{
if (session == null)
{
session = new WoodcuttingSession();
}
session.setLastChopping();
session.incrementLogsCut();
}
var matcher = ANIMA_BARK_PATTERN.matcher(msg);
if (matcher.matches())
{
if (session == null)
{
session = new WoodcuttingSession();
}
session.setLastChopping();
int num = Integer.parseInt(matcher.group(1));
session.incrementBark(num);
}
if (msg.contains("A bird's nest falls out of the tree"))
{
if (clueTierSpawned == null || clueTierSpawned.ordinal() >= config.clueNestNotifyTier().ordinal())
{
notifier.notify(config.showNestNotification(), "A bird nest has spawned!");
}
// Clear the clue tier that has previously spawned
clueTierSpawned = null;
}
if (msg.startsWith("The sapling seems to love"))
{
int ingredientNum = msg.contains("first") ? 1 : (msg.contains("second") ? 2 : (msg.contains("third") ? 3 : -1));
if (ingredientNum == -1)
{
log.debug("unable to find ingredient index from message: {}", msg);
return;
}
GameObject ingredientObj = saplingIngredients.stream()
.filter(obj -> msg.contains(client.getObjectDefinition(obj.getId()).getName().toLowerCase()))
.findAny()
.orElse(null);
if (ingredientObj == null)
{
log.debug("unable to find ingredient from message: {}", msg);
return;
}
saplingOrder[ingredientNum - 1] = ingredientObj;
}
if (msg.equals("There are no open, unpollinated flowers on this bush yet.")
|| msg.equals("The flowers on this bush have not yet opened enough to harvest pollen.")
|| msg.equals("<col=06600c>The bush is already fruiting and won't benefit from <col=06600c>any more pollen.</col>"))
{
if (activeFlowers.contains(lastInteractFlower))
{
log.debug("Flowers reset");
activeFlowers.clear();
}
}
} | @Test
public void testArcticLogs()
{
ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.SPAM, "", "You get an arctic log.", "", 0);
woodcuttingPlugin.onChatMessage(chatMessage);
assertNotNull(woodcuttingPlugin.getSession());
} |
public static void trackNotificationOpenedEvent(String sfData,
String title,
String content,
String appPushServiceName,
String appPushChannel) {
trackNotificationOpenedEvent(sfData, title, content, appPushServiceName, appPushChannel, 0L);
} | @Test
public void trackNotificationOpenedEvent() {
try {
trackJPushOpenActivity();
} catch (InterruptedException e) {
e.printStackTrace();
}
} |
@Nullable
public Integer getIntValue(@IntFormat final int formatType,
@IntRange(from = 0) final int offset) {
if ((offset + getTypeLen(formatType)) > size()) return null;
return switch (formatType) {
case FORMAT_UINT8 -> unsignedByteToInt(mValue[offset]);
case FORMAT_UINT16_LE -> unsignedBytesToInt(mValue[offset], mValue[offset + 1]);
case FORMAT_UINT16_BE -> unsignedBytesToInt(mValue[offset + 1], mValue[offset]);
case FORMAT_UINT24_LE -> unsignedBytesToInt(
mValue[offset],
mValue[offset + 1],
mValue[offset + 2],
(byte) 0
);
case FORMAT_UINT24_BE -> unsignedBytesToInt(
mValue[offset + 2],
mValue[offset + 1],
mValue[offset],
(byte) 0
);
case FORMAT_UINT32_LE -> unsignedBytesToInt(
mValue[offset],
mValue[offset + 1],
mValue[offset + 2],
mValue[offset + 3]
);
case FORMAT_UINT32_BE -> unsignedBytesToInt(
mValue[offset + 3],
mValue[offset + 2],
mValue[offset + 1],
mValue[offset]
);
case FORMAT_SINT8 -> unsignedToSigned(unsignedByteToInt(mValue[offset]), 8);
case FORMAT_SINT16_LE -> unsignedToSigned(unsignedBytesToInt(mValue[offset],
mValue[offset + 1]), 16);
case FORMAT_SINT16_BE -> unsignedToSigned(unsignedBytesToInt(mValue[offset + 1],
mValue[offset]), 16);
case FORMAT_SINT24_LE -> unsignedToSigned(unsignedBytesToInt(
mValue[offset],
mValue[offset + 1],
mValue[offset + 2],
(byte) 0
), 24);
            case FORMAT_SINT24_BE -> unsignedToSigned(unsignedBytesToInt(
                    mValue[offset + 2],
                    mValue[offset + 1],
                    mValue[offset],
                    (byte) 0
            ), 24);
case FORMAT_SINT32_LE -> unsignedToSigned(unsignedBytesToInt(
mValue[offset],
mValue[offset + 1],
mValue[offset + 2],
mValue[offset + 3]
), 32);
case FORMAT_SINT32_BE -> unsignedToSigned(unsignedBytesToInt(
mValue[offset + 3],
mValue[offset + 2],
mValue[offset + 1],
mValue[offset]
), 32);
default -> null;
};
} | @Test
public void getValue_UINT8() {
final Data data = new Data(new byte[] {(byte) 0xC8 });
final int value = data.getIntValue(Data.FORMAT_UINT8, 0);
assertEquals(200, value);
} |
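A standalone sketch of the byte assembly behind getIntValue (helper name hypothetical, mirroring the unsignedBytesToInt pattern above): little-endian treats the lowest-address byte as least significant, big-endian the reverse.

public class EndianDemo {
    static int u(byte b) { return b & 0xFF; } // unsigned byte -> int

    public static void main(String[] args) {
        byte[] v = { 0x01, 0x02, 0x03 };
        int le24 = u(v[0]) | (u(v[1]) << 8) | (u(v[2]) << 16); // as FORMAT_UINT24_LE
        int be24 = u(v[2]) | (u(v[1]) << 8) | (u(v[0]) << 16); // as FORMAT_UINT24_BE
        System.out.printf("LE=0x%06X BE=0x%06X%n", le24, be24); // LE=0x030201 BE=0x010203
    }
}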
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) {
return invoke(n, BigDecimal.ZERO);
} | @Test
void invokeRoundingEven() {
FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.25)), BigDecimal.valueOf(10));
FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.25), BigDecimal.ONE),
BigDecimal.valueOf(10.2));
} |
@ExceptionHandler(UnauthorizedException.class)
protected ShenyuAdminResult handleUnauthorizedException(final UnauthorizedException exception) {
LOG.error("unauthorized exception", exception);
return ShenyuAdminResult.error(CommonErrorCode.TOKEN_NO_PERMISSION, ShenyuResultMessage.TOKEN_HAS_NO_PERMISSION);
} | @Test
public void testShiroExceptionHandler() {
UnauthorizedException unauthorizedException = new UnauthorizedException("Test unauthorizedException");
ShenyuAdminResult result = exceptionHandlersUnderTest.handleUnauthorizedException(unauthorizedException);
Assertions.assertEquals(result.getCode().intValue(), CommonErrorCode.TOKEN_NO_PERMISSION);
Assertions.assertEquals(result.getMessage(), ShenyuResultMessage.TOKEN_HAS_NO_PERMISSION);
} |
@Override
public List<ActionParameter> getParameters() {
return List.of(
ActionParameter.from("filename", "Relative path of the file to write in the agent workspace"),
ActionParameter.from("body", "Text content to write to the file. Binary is not supported.")
);
} | @Test
void testGetParameters() {
List<ActionParameter> parameters = writeFileAction.getParameters();
assertEquals(2, parameters.size());
assertEquals("filename", parameters.get(0).getName());
assertEquals("Relative path of the file to write in the agent workspace", parameters.get(0).getDescription());
assertEquals("body", parameters.get(1).getName());
assertEquals("Text content to write to the file. Binary is not supported.", parameters.get(1).getDescription());
} |
public static void verifyIncrementPubContent(String content) {
if (content == null || content.length() == 0) {
throw new IllegalArgumentException("publish/delete content can not be null");
}
for (int i = 0; i < content.length(); i++) {
char c = content.charAt(i);
if (c == '\r' || c == '\n') {
throw new IllegalArgumentException("publish/delete content can not contain return and linefeed");
}
if (c == Constants.WORD_SEPARATOR.charAt(0)) {
throw new IllegalArgumentException("publish/delete content can not contain(char)2");
}
}
} | @Test
void testVerifyIncrementPubContentFail4() {
Throwable exception = assertThrows(IllegalArgumentException.class, () -> {
String content = "aa" + WORD_SEPARATOR + "bbb";
ContentUtils.verifyIncrementPubContent(content);
});
assertTrue(exception.getMessage().contains("publish/delete content can not contain(char)2"));
} |
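A minimal sketch of the rejected input, assuming Constants.WORD_SEPARATOR is the control character (char) 2, as the "(char)2" error text suggests:

public class PubContentDemo {
    public static void main(String[] args) {
        char wordSeparator = (char) 2; // assumption based on the error message
        String bad = "aa" + wordSeparator + "bbb";
        for (char c : bad.toCharArray()) {
            if (c == '\r' || c == '\n' || c == wordSeparator) {
                System.out.println("rejected at char code " + (int) c); // prints 2
            }
        }
    }
}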
@Deprecated
public static void unJarAndSave(InputStream inputStream, File toDir,
String name, Pattern unpackRegex)
throws IOException{
File file = new File(toDir, name);
ensureDirectory(toDir);
try (OutputStream jar = Files.newOutputStream(file.toPath());
TeeInputStream teeInputStream = new TeeInputStream(inputStream, jar)) {
unJar(teeInputStream, toDir, unpackRegex);
}
} | @SuppressWarnings("deprecation")
@Test
public void testBigJar() throws Exception {
Random r = new Random(System.currentTimeMillis());
File dir = new File(TEST_ROOT_DIR, Long.toHexString(r.nextLong()));
Assert.assertTrue(dir.mkdirs());
File input = generateBigJar(dir);
File output = new File(dir, "job2.jar");
try {
try (InputStream is = new FileInputStream(input)) {
RunJar.unJarAndSave(is, dir, "job2.jar", Pattern.compile(".*"));
}
Assert.assertEquals(input.length(), output.length());
for (int i = 0; i < 10; ++i) {
File subdir = new File(dir, ((i % 2 == 0) ? "dir/" : ""));
File f = new File(subdir, "f" + Integer.toString(i));
Assert.assertEquals(756, f.length());
}
} finally {
// Clean up
FileSystem fs = LocalFileSystem.getLocal(new Configuration());
fs.delete(new Path(dir.getAbsolutePath()), true);
}
} |
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Payload other = (Payload) o;
return this.compressionInfo.equals(other.compressionInfo) && this.data.equals(other.data);
} | @Test
public void testEquals() {
final String foo1 = "foo 1";
final String foo2 = "foo 2";
Payload a = Payload.from(foo1);
Payload b = Payload.from(foo1);
assertEquals(a, b);
Payload c = Payload.from(foo2);
assertNotEquals(a, c);
Slime slime = new Slime();
slime.setString(foo1);
Payload d = Payload.from(new ConfigPayload(slime));
assertNotEquals(a, d);
slime.setString(foo1);
Payload e = Payload.from(new ConfigPayload(slime));
assertEquals(d, e);
slime.setString("foo 2");
Payload f = Payload.from(new ConfigPayload(slime));
assertNotEquals(c, f);
Payload g = Payload.from(new Utf8Array(foo1.getBytes(StandardCharsets.UTF_8)), CompressionInfo.uncompressed());
Payload h = Payload.from(new Utf8Array(foo1.getBytes(StandardCharsets.UTF_8)), CompressionInfo.uncompressed());
assertEquals(a, g);
assertEquals(g, h);
LZ4PayloadCompressor compressor = new LZ4PayloadCompressor();
CompressionInfo info = CompressionInfo.create(CompressionType.LZ4, foo2.length());
Utf8Array compressed = new Utf8Array(compressor.compress(foo2.getBytes()));
Payload i = Payload.from(compressed, info);
Payload j = Payload.from(compressed, info);
assertEquals(i, j);
assertNotEquals(c, j);
} |
public static Catalog loadCatalog(
String impl, String catalogName, Map<String, String> properties, Object hadoopConf) {
Preconditions.checkNotNull(impl, "Cannot initialize custom Catalog, impl class name is null");
DynConstructors.Ctor<Catalog> ctor;
try {
ctor = DynConstructors.builder(Catalog.class).impl(impl).buildChecked();
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException(
String.format("Cannot initialize Catalog implementation %s: %s", impl, e.getMessage()),
e);
}
Catalog catalog;
try {
catalog = ctor.newInstance();
} catch (ClassCastException e) {
throw new IllegalArgumentException(
String.format("Cannot initialize Catalog, %s does not implement Catalog.", impl), e);
}
configureHadoopConf(catalog, hadoopConf);
catalog.initialize(catalogName, properties);
return catalog;
} | @Test
public void loadCustomCatalog_BadCatalogNameCatalog() {
Map<String, String> options = Maps.newHashMap();
options.put("key", "val");
Configuration hadoopConf = new Configuration();
String name = "custom";
String impl = "CatalogDoesNotExist";
assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot initialize Catalog implementation")
.hasMessageContaining("java.lang.ClassNotFoundException: CatalogDoesNotExist");
} |
@Override
public Cost plus(RelOptCost other) {
Cost other0 = (Cost) other;
if (isInfinite() || other.isInfinite()) {
return INFINITY;
}
return new Cost(rows + other0.rows, cpu + other0.cpu, network + other0.network);
} | @Test
public void testPlus() {
CostFactory factory = CostFactory.INSTANCE;
Cost firstCost = factory.makeCost(1.0d, 2.0d, 3.0d);
Cost secondCost = factory.makeCost(4.0d, 5.0d, 6.0d);
Cost infiniteCost = factory.makeInfiniteCost();
assertEquals(factory.makeCost(5.0d, 7.0d, 9.0d), firstCost.plus(secondCost));
assertEquals(infiniteCost, firstCost.plus(infiniteCost));
} |
@VisibleForTesting
static int parseRgb(Slice color)
{
if (color.length() != 4 || color.getByte(0) != '#') {
return -1;
}
int red = Character.digit((char) color.getByte(1), 16);
int green = Character.digit((char) color.getByte(2), 16);
int blue = Character.digit((char) color.getByte(3), 16);
if (red == -1 || green == -1 || blue == -1) {
return -1;
}
// replicate the nibbles to turn a color of the form #rgb => #rrggbb (css semantics)
red = (red << 4) | red;
green = (green << 4) | green;
blue = (blue << 4) | blue;
return (int) rgb(red, green, blue);
} | @Test
public void testParseRgb()
{
assertEquals(parseRgb(toSlice("#000")), 0x00_00_00);
assertEquals(parseRgb(toSlice("#FFF")), 0xFF_FF_FF);
assertEquals(parseRgb(toSlice("#F00")), 0xFF_00_00);
assertEquals(parseRgb(toSlice("#0F0")), 0x00_FF_00);
assertEquals(parseRgb(toSlice("#00F")), 0x00_00_FF);
assertEquals(parseRgb(toSlice("#700")), 0x77_00_00);
assertEquals(parseRgb(toSlice("#070")), 0x00_77_00);
assertEquals(parseRgb(toSlice("#007")), 0x00_00_77);
assertEquals(parseRgb(toSlice("#cde")), 0xCC_DD_EE);
} |
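The nibble replication in parseRgb is plain bit arithmetic; a one-channel sketch:

public class NibbleDemo {
    public static void main(String[] args) {
        int red = Character.digit('c', 16); // 0xC
        red = (red << 4) | red;             // 0xCC -- #rgb -> #rrggbb, CSS semantics
        System.out.printf("0x%02X%n", red); // 0xCC, matching the "#cde" -> 0xCC_DD_EE case
    }
}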
public CompletableFuture<Acknowledge> triggerSavepoint(
AsynchronousJobOperationKey operationKey,
String targetDirectory,
SavepointFormatType formatType,
TriggerSavepointMode savepointMode,
Time timeout) {
return registerOperationIdempotently(
operationKey,
() ->
triggerSavepointFunction.apply(
operationKey.getJobId(),
targetDirectory,
formatType,
savepointMode,
timeout));
} | @Test
public void throwsIfCacheIsShuttingDown() {
savepointTriggerCache.closeAsync();
assertThrows(
IllegalStateException.class,
() ->
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT));
} |
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
throw new ReadOnlyBufferException();
} | @Test
public void shouldRejectSetBytes2() {
assertThrows(UnsupportedOperationException.class, new Executable() {
@Override
public void execute() throws IOException {
unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (ScatteringByteChannel) null, 0);
}
});
} |
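The focal method throws ReadOnlyBufferException while the test expects UnsupportedOperationException; this works because java.nio.ReadOnlyBufferException subclasses UnsupportedOperationException. A one-line check:

import java.nio.ReadOnlyBufferException;

public class ReadOnlyHierarchyDemo {
    public static void main(String[] args) {
        // true: the thrown type satisfies the asserted type
        System.out.println(new ReadOnlyBufferException() instanceof UnsupportedOperationException);
    }
}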
@Override
public String convertDestination(ProtocolConverter converter, Destination d) {
if (d == null) {
return null;
}
ActiveMQDestination activeMQDestination = (ActiveMQDestination)d;
String physicalName = activeMQDestination.getPhysicalName();
String rc = converter.getCreatedTempDestinationName(activeMQDestination);
if( rc!=null ) {
return rc;
}
StringBuilder buffer = new StringBuilder();
if (activeMQDestination.isQueue()) {
if (activeMQDestination.isTemporary()) {
buffer.append("/remote-temp-queue/");
} else {
buffer.append("/queue/");
}
} else {
if (activeMQDestination.isTemporary()) {
buffer.append("/remote-temp-topic/");
} else {
buffer.append("/topic/");
}
}
buffer.append(physicalName);
return buffer.toString();
} | @Test(timeout = 10000)
public void testConvertCompositeMixture() throws Exception {
String destinationA = "destinationA";
String destinationB = "destinationB";
String destinationC = "destinationC";
String destinationD = "destinationD";
String composite = "/queue/" + destinationA + ",/topic/" + destinationB +
",/temp-queue/" + destinationC + ",/temp-topic/" + destinationD;
ActiveMQDestination destination = translator.convertDestination(converter, composite, false);
assertEquals(ActiveMQDestination.QUEUE_TYPE, destination.getDestinationType());
assertTrue(destination.isComposite());
ActiveMQDestination[] composites = destination.getCompositeDestinations();
assertEquals(4, composites.length);
Arrays.sort(composites);
boolean foundQueue = false;
boolean foundTopic = false;
boolean foundTempTopic = false;
boolean foundTempQueue = false;
for (ActiveMQDestination dest : composites) {
if (dest.getDestinationType() == ActiveMQDestination.QUEUE_TYPE) {
foundQueue = true;
} else if (dest.getDestinationType() == ActiveMQDestination.TOPIC_TYPE) {
foundTopic = true;
} else if (dest.getDestinationType() == ActiveMQDestination.TEMP_TOPIC_TYPE) {
foundTempTopic = true;
} else if (dest.getDestinationType() == ActiveMQDestination.TEMP_QUEUE_TYPE) {
foundTempQueue = true;
}
}
assertTrue(foundQueue);
assertTrue(foundTopic);
assertTrue(foundTempTopic);
assertTrue(foundTempQueue);
} |
@VisibleForTesting
Optional<Xpp3Dom> getSpringBootRepackageConfiguration() {
Plugin springBootPlugin =
project.getPlugin("org.springframework.boot:spring-boot-maven-plugin");
if (springBootPlugin != null) {
for (PluginExecution execution : springBootPlugin.getExecutions()) {
if (execution.getGoals().contains("repackage")) {
Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration();
if (configuration == null) {
return Optional.of(new Xpp3Dom("configuration"));
}
boolean skip = Boolean.parseBoolean(getChildValue(configuration, "skip").orElse("false"));
return skip ? Optional.empty() : Optional.of(configuration);
}
}
}
return Optional.empty();
} | @Test
public void testGetSpringBootRepackageConfiguration_noRepackageGoal() {
when(mockMavenProject.getPlugin("org.springframework.boot:spring-boot-maven-plugin"))
.thenReturn(mockPlugin);
when(mockPlugin.getExecutions()).thenReturn(Arrays.asList(mockPluginExecution));
when(mockPluginExecution.getGoals()).thenReturn(Arrays.asList("goal", "foo", "bar"));
assertThat(mavenProjectProperties.getSpringBootRepackageConfiguration()).isEmpty();
} |
@Override
public void deleteFile(Long id) throws Exception {
        // Validate that the file exists
FileDO file = validateFileExists(id);
        // Delete the file from the storage client
FileClient client = fileConfigService.getFileClient(file.getConfigId());
        Assert.notNull(client, "File client ({}) must not be null", file.getConfigId());
client.delete(file.getPath());
        // Delete the record
fileMapper.deleteById(id);
} | @Test
public void testDeleteFile_notExists() {
    // Prepare parameters
Long id = randomLongId();
    // Call, and assert the exception
assertServiceException(() -> fileService.deleteFile(id), FILE_NOT_EXISTS);
} |
@Override
public void onDeserializationFailure(
final String source,
final String changelog,
final byte[] data
) {
// NOTE: this only happens for values, we should never auto-register key schemas
final String sourceSubject = KsqlConstants.getSRSubject(source, false);
final String changelogSubject = KsqlConstants.getSRSubject(changelog, false);
// all schema registry events start with a magic byte 0x0 and then four bytes
// indicating the schema id - we extract that schema id from the data that failed
// to deserialize and then register it into the changelog subject
final int id = ByteBuffer.wrap(data, 1, Integer.BYTES).getInt();
final SchemaRegisterEvent event = new SchemaRegisterEvent(id, sourceSubject, changelogSubject);
try {
if (!failedAttempts.contains(event)) {
LOG.info("Trying to fetch & register schema id {} under subject {}", id, changelogSubject);
final ParsedSchema schema = srClient.getSchemaBySubjectAndId(sourceSubject, id);
srClient.register(changelogSubject, schema);
}
} catch (Exception e) {
LOG.warn("Failed during deserialization callback for topic {}. "
+ "Will not try again to register id {} under subject {}.",
source,
id,
changelogSubject,
e
);
failedAttempts.add(event);
}
} | @Test
public void shouldRegisterIdFromData() throws IOException, RestClientException {
// Given:
when(srClient.getSchemaBySubjectAndId(KsqlConstants.getSRSubject(SOURCE, false), ID)).thenReturn(schema);
final RegisterSchemaCallback call = new RegisterSchemaCallback(srClient);
// When:
call.onDeserializationFailure(SOURCE, CHANGELOG, SOME_DATA);
// Then:
verify(srClient).register(KsqlConstants.getSRSubject(CHANGELOG, false), schema);
} |
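A sketch of the wire layout the callback parses (schema id value hypothetical): one magic byte 0x0 followed by a four-byte big-endian schema id, extracted with the same ByteBuffer slice as the focal method.

import java.nio.ByteBuffer;

public class WireFormatDemo {
    public static void main(String[] args) {
        int schemaId = 7;
        byte[] data = ByteBuffer.allocate(1 + Integer.BYTES)
                .put((byte) 0x0)  // magic byte
                .putInt(schemaId) // big-endian schema id
                .array();
        System.out.println(ByteBuffer.wrap(data, 1, Integer.BYTES).getInt()); // 7
    }
}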
public Preference<Boolean> getBoolean(@StringRes int prefKey, @BoolRes int defaultValue) {
return mRxSharedPreferences.getBoolean(
mResources.getString(prefKey), mResources.getBoolean(defaultValue));
} | @Test
public void testSetupFallbackDictionaryToFalseIfWasNotSetBefore() {
SharedPrefsHelper.setPrefsValue(RxSharedPrefs.CONFIGURATION_VERSION, 11);
SharedPreferences preferences =
PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
Assert.assertFalse(preferences.contains("settings_key_always_use_fallback_user_dictionary"));
new RxSharedPrefs(getApplicationContext(), this::testRestoreFunction);
Assert.assertTrue(preferences.contains("settings_key_always_use_fallback_user_dictionary"));
Assert.assertFalse(
preferences.getBoolean("settings_key_always_use_fallback_user_dictionary", true));
} |
@Override
@Transactional
public boolean updateAfterApproval(Long userId, Integer userType, String clientId, Map<String, Boolean> requestedScopes) {
        // If requestedScopes is empty, nothing was requested, so return true (approved)
if (CollUtil.isEmpty(requestedScopes)) {
return true;
}
        // Update the approval records
        boolean success = false; // at least one scope must be approved
LocalDateTime expireTime = LocalDateTime.now().plusSeconds(TIMEOUT);
for (Map.Entry<String, Boolean> entry : requestedScopes.entrySet()) {
if (entry.getValue()) {
success = true;
}
saveApprove(userId, userType, clientId, entry.getKey(), entry.getValue(), expireTime);
}
return success;
} | @Test
public void testUpdateAfterApproval_approved() {
        // Prepare parameters
Long userId = randomLongId();
Integer userType = randomEle(UserTypeEnum.values()).getValue();
String clientId = randomString();
        Map<String, Boolean> requestedScopes = new LinkedHashMap<>(); // ordered, to make assertions easier
requestedScopes.put("read", true);
requestedScopes.put("write", false);
        // Mock methods (none needed here)
        // Call
boolean success = oauth2ApproveService.updateAfterApproval(userId, userType, clientId,
requestedScopes);
        // Assert
assertTrue(success);
List<OAuth2ApproveDO> result = oauth2ApproveMapper.selectList();
assertEquals(2, result.size());
// read
assertEquals(userId, result.get(0).getUserId());
assertEquals(userType, result.get(0).getUserType());
assertEquals(clientId, result.get(0).getClientId());
assertEquals("read", result.get(0).getScope());
assertTrue(result.get(0).getApproved());
assertFalse(DateUtils.isExpired(result.get(0).getExpiresTime()));
// write
assertEquals(userId, result.get(1).getUserId());
assertEquals(userType, result.get(1).getUserType());
assertEquals(clientId, result.get(1).getClientId());
assertEquals("write", result.get(1).getScope());
assertFalse(result.get(1).getApproved());
assertFalse(DateUtils.isExpired(result.get(1).getExpiresTime()));
} |
@Override
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
if (applicationName != null && !applicationName.isEmpty()) {
invocation.setAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY, applicationName);
}
return invoker.invoke(invocation);
} | @Test
public void testInvokeApplicationKey() {
Invoker invoker = mock(Invoker.class);
Invocation invocation = mock(Invocation.class);
URL url = URL.valueOf("test://test:111/test?application=serviceA");
when(invoker.getUrl()).thenReturn(url);
ApplicationModel applicationModel = FrameworkModel.defaultModel().newApplication();
applicationModel.getApplicationConfigManager().setApplication(new ApplicationConfig("serviceA"));
DubboAppContextFilter filter = new DubboAppContextFilter(applicationModel);
filter.invoke(invoker, invocation);
verify(invoker).invoke(invocation);
verify(invocation).setAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY, "serviceA");
applicationModel.destroy();
} |
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
FunctionConfig mergedConfig = existingConfig.toBuilder().build();
if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
throw new IllegalArgumentException("Tenants differ");
}
if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
throw new IllegalArgumentException("Namespaces differ");
}
if (!existingConfig.getName().equals(newConfig.getName())) {
throw new IllegalArgumentException("Function Names differ");
}
if (!StringUtils.isEmpty(newConfig.getClassName())) {
mergedConfig.setClassName(newConfig.getClassName());
}
if (!StringUtils.isEmpty(newConfig.getJar())) {
mergedConfig.setJar(newConfig.getJar());
}
if (newConfig.getInputSpecs() == null) {
newConfig.setInputSpecs(new HashMap<>());
}
if (mergedConfig.getInputSpecs() == null) {
mergedConfig.setInputSpecs(new HashMap<>());
}
if (newConfig.getInputs() != null) {
newConfig.getInputs().forEach((topicName -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder().isRegexPattern(false).build());
}));
}
if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
ConsumerConfig.builder()
.isRegexPattern(true)
.build());
}
if (newConfig.getCustomSerdeInputs() != null) {
newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.serdeClassName(serdeClassName)
.isRegexPattern(false)
.build());
});
}
if (newConfig.getCustomSchemaInputs() != null) {
newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.schemaType(schemaClassname)
.isRegexPattern(false)
.build());
});
}
if (!newConfig.getInputSpecs().isEmpty()) {
newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
if (!existingConfig.getInputSpecs().containsKey(topicName)) {
throw new IllegalArgumentException("Input Topics cannot be altered");
}
if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
throw new IllegalArgumentException(
"isRegexPattern for input topic " + topicName + " cannot be altered");
}
mergedConfig.getInputSpecs().put(topicName, consumerConfig);
});
}
if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
.equals(existingConfig.getOutputSerdeClassName())) {
throw new IllegalArgumentException("Output Serde mismatch");
}
if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
.equals(existingConfig.getOutputSchemaType())) {
throw new IllegalArgumentException("Output Schema mismatch");
}
if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
mergedConfig.setLogTopic(newConfig.getLogTopic());
}
if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
.equals(existingConfig.getProcessingGuarantees())) {
throw new IllegalArgumentException("Processing Guarantees cannot be altered");
}
if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
.equals(existingConfig.getRetainOrdering())) {
throw new IllegalArgumentException("Retain Ordering cannot be altered");
}
if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
.equals(existingConfig.getRetainKeyOrdering())) {
throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
}
if (!StringUtils.isEmpty(newConfig.getOutput())) {
mergedConfig.setOutput(newConfig.getOutput());
}
if (newConfig.getUserConfig() != null) {
mergedConfig.setUserConfig(newConfig.getUserConfig());
}
if (newConfig.getSecrets() != null) {
mergedConfig.setSecrets(newConfig.getSecrets());
}
if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
throw new IllegalArgumentException("Runtime cannot be altered");
}
if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
throw new IllegalArgumentException("AutoAck cannot be altered");
}
if (newConfig.getMaxMessageRetries() != null) {
mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
}
if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
}
if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
.equals(existingConfig.getSubName())) {
throw new IllegalArgumentException("Subscription Name cannot be altered");
}
if (newConfig.getParallelism() != null) {
mergedConfig.setParallelism(newConfig.getParallelism());
}
if (newConfig.getResources() != null) {
mergedConfig
.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
}
if (newConfig.getWindowConfig() != null) {
mergedConfig.setWindowConfig(newConfig.getWindowConfig());
}
if (newConfig.getTimeoutMs() != null) {
mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
}
if (newConfig.getCleanupSubscription() != null) {
mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
}
if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
}
if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
}
if (newConfig.getProducerConfig() != null) {
mergedConfig.setProducerConfig(newConfig.getProducerConfig());
}
return mergedConfig;
} | @Test
public void testMergeDifferentProducerConfig() {
FunctionConfig functionConfig = createFunctionConfig();
ProducerConfig producerConfig = new ProducerConfig();
producerConfig.setMaxPendingMessages(100);
producerConfig.setMaxPendingMessagesAcrossPartitions(1000);
producerConfig.setUseThreadLocalProducers(true);
producerConfig.setBatchBuilder("DEFAULT");
producerConfig.setCompressionType(CompressionType.ZLIB);
FunctionConfig newFunctionConfig = createUpdatedFunctionConfig("producerConfig", producerConfig);
FunctionConfig mergedConfig = FunctionConfigUtils.validateUpdate(functionConfig, newFunctionConfig);
assertEquals(
mergedConfig.getProducerConfig(),
producerConfig
);
mergedConfig.setProducerConfig(functionConfig.getProducerConfig());
assertEquals(
new Gson().toJson(functionConfig),
new Gson().toJson(mergedConfig)
);
} |
@Override
public void initialize(URI uri, Configuration conf)
throws IOException
{
requireNonNull(uri, "uri is null");
requireNonNull(conf, "conf is null");
super.initialize(uri, conf);
setConf(conf);
this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
this.workingDirectory = new Path(PATH_SEPARATOR).makeQualified(this.uri, new Path(PATH_SEPARATOR));
HiveS3Config defaults = new HiveS3Config();
this.stagingDirectory = new File(conf.get(S3_STAGING_DIRECTORY, defaults.getS3StagingDirectory().toString()));
this.maxAttempts = conf.getInt(S3_MAX_CLIENT_RETRIES, defaults.getS3MaxClientRetries()) + 1;
this.maxBackoffTime = Duration.valueOf(conf.get(S3_MAX_BACKOFF_TIME, defaults.getS3MaxBackoffTime().toString()));
this.maxRetryTime = Duration.valueOf(conf.get(S3_MAX_RETRY_TIME, defaults.getS3MaxRetryTime().toString()));
int maxErrorRetries = conf.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries());
boolean sslEnabled = conf.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled());
Duration connectTimeout = Duration.valueOf(conf.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString()));
Duration socketTimeout = Duration.valueOf(conf.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString()));
int maxConnections = conf.getInt(S3_MAX_CONNECTIONS, defaults.getS3MaxConnections());
this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes());
this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, defaults.getS3MultipartMinPartSize().toBytes());
this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess());
this.useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials());
this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion());
this.s3IamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole());
this.s3IamRoleSessionName = conf.get(S3_IAM_ROLE_SESSION_NAME, defaults.getS3IamRoleSessionName());
verify(!(useInstanceCredentials && conf.get(S3_IAM_ROLE) != null),
"Invalid configuration: either use instance credentials or specify an iam role");
verify((pinS3ClientToCurrentRegion && conf.get(S3_ENDPOINT) == null) || !pinS3ClientToCurrentRegion,
"Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region");
this.sseEnabled = conf.getBoolean(S3_SSE_ENABLED, defaults.isS3SseEnabled());
this.sseType = PrestoS3SseType.valueOf(conf.get(S3_SSE_TYPE, defaults.getS3SseType().name()));
this.sseKmsKeyId = conf.get(S3_SSE_KMS_KEY_ID, defaults.getS3SseKmsKeyId());
this.s3AclType = PrestoS3AclType.valueOf(conf.get(S3_ACL_TYPE, defaults.getS3AclType().name()));
String userAgentPrefix = conf.get(S3_USER_AGENT_PREFIX, defaults.getS3UserAgentPrefix());
this.skipGlacierObjects = conf.getBoolean(S3_SKIP_GLACIER_OBJECTS, defaults.isSkipGlacierObjects());
this.s3StorageClass = conf.getEnum(S3_STORAGE_CLASS, defaults.getS3StorageClass());
ClientConfiguration configuration = new ClientConfiguration()
.withMaxErrorRetry(maxErrorRetries)
.withProtocol(sslEnabled ? Protocol.HTTPS : Protocol.HTTP)
.withConnectionTimeout(toIntExact(connectTimeout.toMillis()))
.withSocketTimeout(toIntExact(socketTimeout.toMillis()))
.withMaxConnections(maxConnections)
.withUserAgentPrefix(userAgentPrefix)
.withUserAgentSuffix(S3_USER_AGENT_SUFFIX);
this.credentialsProvider = createAwsCredentialsProvider(uri, conf);
this.s3 = createAmazonS3Client(conf, configuration);
} | @Test
public void testStaticCredentials()
throws Exception
{
Configuration config = new Configuration();
        config.set(S3_ACCESS_KEY, "test_access_key_id");
        config.set(S3_SECRET_KEY, "test_secret_access_key");
// the static credentials should be preferred
try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) {
fs.initialize(new URI("s3n://test-bucket/"), config);
assertInstanceOf(getAwsCredentialsProvider(fs), AWSStaticCredentialsProvider.class);
}
} |
public int generate(Class<? extends CustomResource> crdClass, Writer out) throws IOException {
ObjectNode node = nf.objectNode();
Crd crd = crdClass.getAnnotation(Crd.class);
if (crd == null) {
err(crdClass + " is not annotated with @Crd");
} else {
node.put("apiVersion", "apiextensions.k8s.io/" + crdApiVersion)
.put("kind", "CustomResourceDefinition")
.putObject("metadata")
.put("name", crd.spec().names().plural() + "." + crd.spec().group());
if (!labels.isEmpty()) {
((ObjectNode) node.get("metadata"))
.putObject("labels")
.setAll(labels.entrySet().stream()
.collect(Collectors.<Map.Entry<String, String>, String, JsonNode, LinkedHashMap<String, JsonNode>>toMap(
Map.Entry::getKey,
e -> new TextNode(
e.getValue()
.replace("%group%", crd.spec().group())
.replace("%plural%", crd.spec().names().plural())
.replace("%singular%", crd.spec().names().singular())),
(x, y) -> x,
LinkedHashMap::new)));
}
node.set("spec", buildSpec(crdApiVersion, crd.spec(), crdClass));
}
mapper.writeValue(out, node);
return numErrors;
} | @Test
void simpleTestWithoutDescriptions() throws IOException {
CrdGenerator crdGenerator = new CrdGenerator(KubeVersion.V1_16_PLUS, ApiVersion.V1, CrdGenerator.YAML_MAPPER,
emptyMap(), crdGeneratorReporter, emptyList(), null, null,
new CrdGenerator.NoneConversionStrategy(), ApiVersion.parseRange("v1+"));
StringWriter w = new StringWriter();
crdGenerator.generate(ExampleCrd.class, w);
String s = w.toString();
assertTrue(errors.isEmpty(), "CrdGenerator should not report any errors: " + errors);
assertEquals(CrdTestUtils.readResource("simpleTestWithoutDescriptions.yaml"), s);
} |
@Override
public Map<String, JWK> getAllPublicKeys() {
Map<String, JWK> pubKeys = new HashMap<>();
// pull out all public keys
for (String keyId : keys.keySet()) {
JWK key = keys.get(keyId);
JWK pub = key.toPublicJWK();
if (pub != null) {
pubKeys.put(keyId, pub);
}
}
return pubKeys;
} | @Test
public void getAllPubKeys() throws ParseException {
Map<String,JWK> keys2check = service_2.getAllPublicKeys();
assertEquals(
JSONObjectUtils.getString(RSAjwk.toPublicJWK().toJSONObject(), "e"),
JSONObjectUtils.getString(keys2check.get(RSAkid).toJSONObject(), "e")
);
assertEquals(
JSONObjectUtils.getString(RSAjwk_2.toPublicJWK().toJSONObject(), "e"),
JSONObjectUtils.getString(keys2check.get(RSAkid_2).toJSONObject(), "e")
);
assertTrue(service_3.getAllPublicKeys().isEmpty());
} |
@Override
public Serde.Deserializer deserializer(String topic, Serde.Target type) {
return new Serde.Deserializer() {
@SneakyThrows
@Override
public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
try {
UnknownFieldSet unknownFields = UnknownFieldSet.parseFrom(data);
return new DeserializeResult(unknownFields.toString(), DeserializeResult.Type.STRING, Map.of());
} catch (Exception e) {
throw new ValidationException(e.getMessage());
}
}
};
} | @Test
void deserializeSimpleMessage() {
var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE)
.deserialize(null, getProtobufMessage());
assertThat(deserialized.getResult()).isEqualTo("1: 5\n");
} |
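getProtobufMessage is not shown, but under protobuf wire-format rules the expected output "1: 5\n" corresponds to field number 1 with varint wire type 0 and value 5, i.e. tag byte (1 << 3) | 0 = 0x08 followed by 0x05. A hedged sketch using the same parse call:

import com.google.protobuf.UnknownFieldSet;

public class UnknownFieldsDemo {
    public static void main(String[] args) throws Exception {
        byte[] wire = { 0x08, 0x05 }; // field 1, varint, value 5
        System.out.print(UnknownFieldSet.parseFrom(wire)); // prints "1: 5"
    }
}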
public CommandLine getCommandLine() {
return commandLine;
} | @SuppressWarnings("static-access")
@Test
public void testCreateWithOptions() throws Exception {
// Create new option newOpt
Option opt = Option.builder("newOpt").argName("int")
.hasArg()
.desc("A new option")
.build();
Options opts = new Options();
opts.addOption(opt);
// Check newOpt is actually used to parse the args
String[] args = new String[2];
args[0] = "--newOpt";
args[1] = "7";
GenericOptionsParser g = new GenericOptionsParser(opts, args);
assertEquals("New option was ignored",
"7", g.getCommandLine().getOptionValues("newOpt")[0]);
} |
@Override
public org.apache.kafka.streams.kstream.Transformer<KIn, VIn, Iterable<KeyValue<KOut, VOut>>> get() {
return new org.apache.kafka.streams.kstream.Transformer<KIn, VIn, Iterable<KeyValue<KOut, VOut>>>() {
private final org.apache.kafka.streams.kstream.Transformer<KIn, VIn, KeyValue<KOut, VOut>> transformer = transformerSupplier.get();
@Override
public void init(final ProcessorContext context) {
transformer.init(context);
}
@Override
public Iterable<KeyValue<KOut, VOut>> transform(final KIn key, final VIn value) {
final KeyValue<KOut, VOut> pair = transformer.transform(key, value);
if (pair != null) {
return Collections.singletonList(pair);
}
return Collections.emptyList();
}
@Override
public void close() {
transformer.close();
}
};
} | @Test
public void shouldAlwaysGetNewAdapterTransformer() {
@SuppressWarnings("unchecked")
final org.apache.kafka.streams.kstream.Transformer<String, String, KeyValue<Integer, Integer>> transformer1 =
mock(org.apache.kafka.streams.kstream.Transformer.class);
@SuppressWarnings("unchecked")
final org.apache.kafka.streams.kstream.Transformer<String, String, KeyValue<Integer, Integer>> transformer2 =
mock(org.apache.kafka.streams.kstream.Transformer.class);
@SuppressWarnings("unchecked")
final org.apache.kafka.streams.kstream.Transformer<String, String, KeyValue<Integer, Integer>> transformer3 =
mock(org.apache.kafka.streams.kstream.Transformer.class);
when(transformerSupplier.get()).thenReturn(transformer1).thenReturn(transformer2).thenReturn(transformer3);
final TransformerSupplierAdapter<String, String, Integer, Integer> adapter =
new TransformerSupplierAdapter<>(transformerSupplier);
final org.apache.kafka.streams.kstream.Transformer<String, String, Iterable<KeyValue<Integer, Integer>>> adapterTransformer1 = adapter.get();
adapterTransformer1.init(context);
final org.apache.kafka.streams.kstream.Transformer<String, String, Iterable<KeyValue<Integer, Integer>>> adapterTransformer2 = adapter.get();
adapterTransformer2.init(context);
final org.apache.kafka.streams.kstream.Transformer<String, String, Iterable<KeyValue<Integer, Integer>>> adapterTransformer3 = adapter.get();
adapterTransformer3.init(context);
assertThat(adapterTransformer1, not(sameInstance(adapterTransformer2)));
assertThat(adapterTransformer2, not(sameInstance(adapterTransformer3)));
assertThat(adapterTransformer3, not(sameInstance(adapterTransformer1)));
verify(transformer1).init(context);
verify(transformer2).init(context);
verify(transformer3).init(context);
} |
@Override
public DataType getType() {
return DataType.URI;
} | @Test
public void testGetType() {
assertEquals(DataType.URI, uriRegisterExecutorSubscriber.getType());
} |
@Override
public String getParamDesc() {
return paramDesc;
} | @Test
void getParamDesc() {
Assertions.assertEquals(ReflectUtils.getDesc(String.class), method.getParamDesc());
} |
public static Ip4Prefix valueOf(int address, int prefixLength) {
return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
} | @Test
public void testValueOfAddressIPv4() {
Ip4Address ipAddress;
Ip4Prefix ipPrefix;
ipAddress = Ip4Address.valueOf("1.2.3.4");
ipPrefix = Ip4Prefix.valueOf(ipAddress, 24);
assertThat(ipPrefix.toString(), is("1.2.3.0/24"));
ipPrefix = Ip4Prefix.valueOf(ipAddress, 32);
assertThat(ipPrefix.toString(), is("1.2.3.4/32"));
ipAddress = Ip4Address.valueOf("1.2.3.5");
ipPrefix = Ip4Prefix.valueOf(ipAddress, 32);
assertThat(ipPrefix.toString(), is("1.2.3.5/32"));
ipAddress = Ip4Address.valueOf("0.0.0.0");
ipPrefix = Ip4Prefix.valueOf(ipAddress, 0);
assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
ipPrefix = Ip4Prefix.valueOf(ipAddress, 32);
assertThat(ipPrefix.toString(), is("0.0.0.0/32"));
ipAddress = Ip4Address.valueOf("255.255.255.255");
ipPrefix = Ip4Prefix.valueOf(ipAddress, 0);
assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
ipPrefix = Ip4Prefix.valueOf(ipAddress, 16);
assertThat(ipPrefix.toString(), is("255.255.0.0/16"));
ipPrefix = Ip4Prefix.valueOf(ipAddress, 32);
assertThat(ipPrefix.toString(), is("255.255.255.255/32"));
} |
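The normalization these assertions rely on (e.g. 255.255.255.255 with prefix 16 printing as 255.255.0.0/16) comes from zeroing the host bits. A minimal sketch of that masking step, not the actual ONOS implementation:
static int maskAddress(int address, int prefixLength) {
    if (prefixLength == 0) {
        return 0; // Java shifts are mod 32, so ~0 << 32 would not clear anything
    }
    int mask = ~0 << (32 - prefixLength); // top prefixLength bits set
    return address & mask;
}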
public static Correspondence<Number, Number> tolerance(double tolerance) {
return new TolerantNumericEquality(tolerance);
} | @Test
public void testTolerance_viaIterableSubjectContains_success() {
assertThat(ImmutableList.of(1.02, 2.04, 3.08))
.comparingElementsUsing(tolerance(0.05))
.contains(2.0);
} |
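TolerantNumericEquality's body is not shown here; contains(2.0) presumably succeeds because |2.04 - 2.0| = 0.04 falls within the 0.05 tolerance. A hedged sketch of such a predicate:
static boolean correspond(double tolerance, Number actual, Number expected) {
    // Symmetric absolute-difference check, inferred from the test's behavior.
    return Math.abs(actual.doubleValue() - expected.doubleValue()) <= tolerance;
}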
public static int[] rowMin(int[][] matrix) {
int[] x = new int[matrix.length];
for (int i = 0; i < x.length; i++) {
x[i] = min(matrix[i]);
}
return x;
} | @Test
public void testRowMin() {
System.out.println("rowMin");
double[][] A = {
{0.7220180, 0.07121225, 0.6881997},
{-0.2648886, -0.89044952, 0.3700456},
{-0.6391588, 0.44947578, 0.6240573}
};
double[] r = {0.07121225, -0.89044952, -0.6391588};
double[] result = MathEx.rowMin(A);
for (int i = 0; i < r.length; i++) {
assertEquals(result[i], r[i], 1E-7);
}
} |
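Note that the focal method above takes int[][] while the test passes double[][]; MathEx presumably provides a parallel overload for doubles. A sketch under that assumption:
public static double[] rowMin(double[][] matrix) {
    double[] x = new double[matrix.length];
    for (int i = 0; i < x.length; i++) {
        x[i] = min(matrix[i]); // assumes a min(double[]) helper mirroring the int version
    }
    return x;
}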
public void write(MemoryBuffer buffer, Locale l) {
fury.writeJavaString(buffer, l.getLanguage());
fury.writeJavaString(buffer, l.getCountry());
fury.writeJavaString(buffer, l.getVariant());
} | @Test(dataProvider = "furyCopyConfig")
public void testWrite(Fury fury) {
copyCheckWithoutSame(fury, Locale.US);
copyCheckWithoutSame(fury, Locale.CHINESE);
copyCheckWithoutSame(fury, Locale.ENGLISH);
copyCheckWithoutSame(fury, Locale.TRADITIONAL_CHINESE);
copyCheckWithoutSame(fury, Locale.CHINA);
copyCheckWithoutSame(fury, Locale.TAIWAN);
copyCheckWithoutSame(fury, Locale.getDefault());
} |
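Only the write side is shown; a matching read method would reverse the three string writes in order. A hedged sketch, assuming readJavaString is the counterpart of the writeJavaString calls used above:
public Locale read(MemoryBuffer buffer) {
    String language = fury.readJavaString(buffer); // assumed counterpart API
    String country = fury.readJavaString(buffer);
    String variant = fury.readJavaString(buffer);
    return new Locale(language, country, variant);
}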
public static String createGPX(InstructionList instructions, String trackName, long startTimeMillis, boolean includeElevation, boolean withRoute, boolean withTrack, boolean withWayPoints, String version, Translation tr) {
DateFormat formatter = Helper.createFormatter();
DecimalFormat decimalFormat = new DecimalFormat("#", DecimalFormatSymbols.getInstance(Locale.ROOT));
decimalFormat.setMinimumFractionDigits(1);
decimalFormat.setMaximumFractionDigits(6);
decimalFormat.setMinimumIntegerDigits(1);
String header = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>"
+ "<gpx xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " creator=\"Graphhopper version " + version + "\" version=\"1.1\""
// This xmlns:gh acts only as ID, no valid URL necessary.
// Use a separate namespace for custom extensions to make basecamp happy.
+ " xmlns:gh=\"https://graphhopper.com/public/schema/gpx/1.1\">"
+ "\n<metadata>"
+ "<copyright author=\"OpenStreetMap contributors\"/>"
+ "<link href=\"http://graphhopper.com\">"
+ "<text>GraphHopper GPX</text>"
+ "</link>"
+ "<time>" + formatter.format(startTimeMillis) + "</time>"
+ "</metadata>";
StringBuilder gpxOutput = new StringBuilder(header);
if (!instructions.isEmpty()) {
if (withWayPoints) {
createWayPointBlock(gpxOutput, instructions.get(0), decimalFormat, tr); // Start
for (Instruction currInstr : instructions) {
if ((currInstr.getSign() == Instruction.REACHED_VIA) // Via
|| (currInstr.getSign() == Instruction.FINISH)) // End
{
createWayPointBlock(gpxOutput, currInstr, decimalFormat, tr);
}
}
}
if (withRoute) {
gpxOutput.append("\n<rte>");
Instruction nextInstr = null;
for (Instruction currInstr : instructions) {
if (null != nextInstr)
createRteptBlock(gpxOutput, nextInstr, currInstr, decimalFormat, tr);
nextInstr = currInstr;
}
createRteptBlock(gpxOutput, nextInstr, null, decimalFormat, tr);
gpxOutput.append("\n</rte>");
}
}
if (withTrack) {
gpxOutput.append("\n<trk><name>").append(trackName).append("</name>");
gpxOutput.append("<trkseg>");
for (GPXEntry entry : createGPXList(instructions)) {
gpxOutput.append("\n<trkpt lat=\"").append(decimalFormat.format(entry.getPoint().getLat()));
gpxOutput.append("\" lon=\"").append(decimalFormat.format(entry.getPoint().getLon())).append("\">");
if (includeElevation)
gpxOutput.append("<ele>").append(Helper.round2(((GHPoint3D) entry.getPoint()).getEle())).append("</ele>");
if (entry.getTime() != null)
gpxOutput.append("<time>").append(formatter.format(startTimeMillis + entry.getTime())).append("</time>");
gpxOutput.append("</trkpt>");
}
gpxOutput.append("\n</trkseg>");
gpxOutput.append("\n</trk>");
}
// we could now use 'wpt' for via points
gpxOutput.append("\n</gpx>");
return gpxOutput.toString();
} | @Test
public void testInstructionsWithTimeAndPlace() {
BaseGraph g = new BaseGraph.Builder(carManager).create();
// n-4-5 (n: pillar node)
// |
// 7-3-2-6
// |
// 1
NodeAccess na = g.getNodeAccess();
na.setNode(1, 15.0, 10);
na.setNode(2, 15.1, 10);
na.setNode(3, 15.1, 9.9);
na.setNode(4, 15.2, 9.9);
na.setNode(5, 15.2, 10);
na.setNode(6, 15.1, 10.1);
na.setNode(7, 15.1, 9.8);
g.edge(1, 2).set(speedEnc, 63).setDistance(7000).setKeyValues(Map.of(STREET_NAME, new KValue("1-2")));
g.edge(2, 3).set(speedEnc, 72).setDistance(8000).setKeyValues(Map.of(STREET_NAME, new KValue("2-3")));
g.edge(2, 6).set(speedEnc, 9).setDistance(10000).setKeyValues(Map.of(STREET_NAME, new KValue("2-6")));
g.edge(3, 4).set(speedEnc, 81).setDistance(9000).setKeyValues(Map.of(STREET_NAME, new KValue("3-4")));
g.edge(3, 7).set(speedEnc, 9).setDistance(10000).setKeyValues(Map.of(STREET_NAME, new KValue("3-7")));
g.edge(4, 5).set(speedEnc, 90).setDistance(10000).setKeyValues(Map.of(STREET_NAME, new KValue("4-5")));
Weighting weighting = new SpeedWeighting(speedEnc);
Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, g, weighting, carManager, trMap.getWithFallBack(Locale.US));
PointList points = p.calcPoints();
assertEquals(4, wayList.size());
assertEquals(34000, p.getDistance(), 1e-1);
assertEquals(34000, sumDistances(wayList), 1e-1);
assertEquals(5, points.size());
assertEquals(445588, p.getTime());
assertEquals(Instruction.CONTINUE_ON_STREET, wayList.get(0).getSign());
assertEquals("1-2", wayList.get(0).getName());
assertEquals(15, wayList.get(0).getPoints().getLat(0), 1e-3);
assertEquals(10, wayList.get(0).getPoints().getLon(0), 1e-3);
assertEquals(Instruction.TURN_LEFT, wayList.get(1).getSign());
assertEquals(15.1, wayList.get(1).getPoints().getLat(0), 1e-3);
assertEquals(10, wayList.get(1).getPoints().getLon(0), 1e-3);
assertEquals(Instruction.TURN_RIGHT, wayList.get(2).getSign());
assertEquals(15.1, wayList.get(2).getPoints().getLat(0), 1e-3);
assertEquals(9.9, wayList.get(2).getPoints().getLon(0), 1e-3);
String gpxStr = GpxConversions.createGPX(wayList, "test", (long) 0, false, true, true, true, Constants.VERSION, trMap.getWithFallBack(Locale.US));
verifyGPX(gpxStr);
// System.out.println(gpxStr);
assertTrue(gpxStr.contains("<trkpt lat=\"15.0\" lon=\"10.0\"><time>1970-01-01T00:00:00Z</time>"), gpxStr);
assertTrue(gpxStr.contains("<extensions>") && gpxStr.contains("</extensions>"), gpxStr);
assertTrue(gpxStr.contains("<rtept lat=\"15.1\" lon=\"10.0\">"), gpxStr);
assertTrue(gpxStr.contains("<gh:distance>8000.0</gh:distance>"), gpxStr);
assertTrue(gpxStr.contains("<desc>turn left onto 2-3</desc>"), gpxStr);
assertTrue(gpxStr.contains("<gh:sign>-2</gh:sign>"), gpxStr);
assertTrue(gpxStr.contains("<gh:direction>N</gh:direction>"), gpxStr);
assertTrue(gpxStr.contains("<gh:azimuth>0.0</gh:azimuth>"), gpxStr);
assertFalse(gpxStr.contains("NaN"));
} |
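The <time> assertions (e.g. 1970-01-01T00:00:00Z) imply that Helper.createFormatter() produces ISO-8601 timestamps with second precision in UTC. A hedged sketch of such a formatter, inferred from the asserted output rather than taken from GraphHopper:
static java.text.DateFormat createFormatterSketch() {
    java.text.SimpleDateFormat formatter =
            new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", java.util.Locale.ROOT);
    formatter.setTimeZone(java.util.TimeZone.getTimeZone("UTC"));
    return formatter;
}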
public void errorFromOrigin(final Throwable ex) {
try {
// Flag that there was an origin server related error for the loadbalancer to choose
// whether to circuit-trip this server.
if (originConn != null) {
// NOTE: if originConn is null, these stats will have been incremented within
// PerServerConnectionPool, so they don't need to be incremented here.
originConn.getServer().incrementSuccessiveConnectionFailureCount();
originConn.getServer().addToFailureCount();
originConn.flagShouldClose();
}
// detach from current origin
final Channel originCh = unlinkFromOrigin();
methodBinding.bind(() -> processErrorFromOrigin(ex, originCh));
} catch (Exception e) {
channelCtx.fireExceptionCaught(ex);
}
} | @Test
public void onErrorFromOriginNoRetryAdjustment() {
doReturn(OutboundErrorType.RESET_CONNECTION).when(attemptFactory).mapNettyToOutboundErrorType(any());
proxyEndpoint.errorFromOrigin(new RuntimeException());
verify(nettyOrigin).adjustRetryPolicyIfNeeded(request);
verify(nettyOrigin).connectToOrigin(any(), any(), anyInt(), any(), any(), any());
} |
public void setSelector(String selector) {
this.selector = selector;
} | @Test
void testSerialize() throws JsonProcessingException {
ServiceListRequest request = new ServiceListRequest(NAMESPACE, GROUP, 1, 10);
request.setSelector("label");
String json = mapper.writeValueAsString(request);
assertTrue(json.contains("\"groupName\":\"" + GROUP + "\""));
assertTrue(json.contains("\"namespace\":\"" + NAMESPACE + "\""));
assertTrue(json.contains("\"module\":\"" + NAMING_MODULE + "\""));
assertTrue(json.contains("\"selector\":\"label\""));
assertTrue(json.contains("\"pageNo\":1"));
assertTrue(json.contains("\"pageSize\":10"));
} |
@Override
public RestLiResponseData<CreateResponseEnvelope> buildRestLiResponseData(Request request,
RoutingResult routingResult,
Object result,
Map<String, String> headers,
List<HttpCookie> cookies)
{
CreateResponse createResponse = (CreateResponse) result;
boolean isGetAfterCreate = createResponse instanceof CreateKVResponse;
if (createResponse.hasError())
{
RestLiServiceException exception = createResponse.getError();
return new RestLiResponseDataImpl<>(new CreateResponseEnvelope(exception, isGetAfterCreate), headers, cookies);
}
Object id = null;
if (createResponse.hasId())
{
id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createResponse.getId(), routingResult);
final ProtocolVersion protocolVersion = routingResult.getContext().getRestliProtocolVersion();
String stringKey = URIParamUtils.encodeKeyForUri(id, UriComponent.Type.PATH_SEGMENT, protocolVersion);
UriBuilder uribuilder = UriBuilder.fromUri(request.getURI());
uribuilder.path(stringKey);
uribuilder.replaceQuery(null);
if (routingResult.getContext().hasParameter(RestConstants.ALT_KEY_PARAM))
{
// add altkey param to location URI
uribuilder.queryParam(RestConstants.ALT_KEY_PARAM, routingResult.getContext().getParameter(RestConstants.ALT_KEY_PARAM));
}
headers.put(RestConstants.HEADER_LOCATION, uribuilder.build((Object) null).toString());
headers.put(HeaderUtil.getIdHeaderName(protocolVersion), URIParamUtils.encodeKeyForHeader(id, protocolVersion));
}
// Verify that a null status was not passed into the CreateResponse; if it was, this is a developer error.
if (createResponse.getStatus() == null)
{
throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
"Unexpected null encountered. HttpStatus is null inside of a CreateResponse from the resource method: "
+ routingResult.getResourceMethod());
}
final ResourceContext resourceContext = routingResult.getContext();
RecordTemplate idResponse;
if (createResponse instanceof CreateKVResponse && resourceContext.isReturnEntityRequested())
{
RecordTemplate entity = ((CreateKVResponse<?, ?>) createResponse).getEntity();
// Verify that a null entity was not passed into the CreateKVResponse; if it was, this is a developer error.
if (entity == null)
{
throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
"Unexpected null encountered. Entity is null inside of a CreateKVResponse when the entity should be returned. In resource method: " + routingResult.getResourceMethod());
}
DataMap entityData = entity.data();
TimingContextUtil.beginTiming(resourceContext.getRawRequestContext(),
FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
final DataMap data = RestUtils.projectFields(entityData, resourceContext);
TimingContextUtil.endTiming(resourceContext.getRawRequestContext(),
FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
idResponse = new AnyRecord(data);
// Ideally, we should set an IdEntityResponse to the envelope. But we are keeping AnyRecord
// to make sure the runtime object is backwards compatible.
// idResponse = new IdEntityResponse<>(id, new AnyRecord(data));
}
else //Instance of idResponse
{
idResponse = new IdResponse<>(id);
}
return new RestLiResponseDataImpl<>(new CreateResponseEnvelope(createResponse.getStatus(), idResponse, isGetAfterCreate), headers, cookies);
} | @Test
public void testCreateResponseException() throws URISyntaxException
{
CreateResponse createResponse = new CreateResponse(new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST));
RestRequest restRequest = new RestRequestBuilder(new URI("/foo")).build();
RestLiResponseData<?> envelope = new CreateResponseBuilder()
.buildRestLiResponseData(restRequest, null, createResponse, Collections.emptyMap(),
Collections.emptyList());
Assert.assertTrue(envelope.getResponseEnvelope().isErrorResponse());
} |
public static String pretreatStatement(Executor executor, String statement) {
statement = SqlUtil.removeNote(statement);
if (executor.isUseSqlFragment()) {
statement = executor.getVariableManager().parseVariable(statement);
}
return statement.trim();
} | @Test
public void replaceFragmentTest() {
String statement = "nullif1:=NULLIF(1, 0) as val;\n"
+ "nullif2:=NULLIF(0, 0) as val$null;\n"
+ "select ${nullif1},${nullif2}";
String pretreatStatement = FlinkInterceptor.pretreatStatement(ExecutorFactory.getDefaultExecutor(), statement);
Assert.assertEquals("select NULLIF(1, 0) as val,NULLIF(0, 0) as val$null", pretreatStatement);
} |
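The substitution the test exercises turns name:=expr; definitions into replacements for ${name} references in the final statement. A minimal sketch of that mechanism, not the actual VariableManager implementation:
static String substituteFragments(String statement) {
    java.util.Map<String, String> fragments = new java.util.LinkedHashMap<>();
    String query = null;
    for (String line : statement.split(";\n")) {
        int idx = line.indexOf(":=");
        if (idx > 0) {
            fragments.put(line.substring(0, idx).trim(), line.substring(idx + 2).trim());
        } else {
            query = line;
        }
    }
    for (java.util.Map.Entry<String, String> e : fragments.entrySet()) {
        // String.replace is literal, so $ inside values (e.g. val$null) survives intact
        query = query.replace("${" + e.getKey() + "}", e.getValue());
    }
    return query;
}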
public CompletableFuture<Void> compensate(URL lra, Exchange exchange) {
HttpRequest request = prepareRequest(URI.create(lra.toString() + COORDINATOR_PATH_CANCEL), exchange)
.setHeader(CONTENT_TYPE, TEXT_PLAIN_CONTENT)
.PUT(HttpRequest.BodyPublishers.ofString(""))
.build();
CompletableFuture<HttpResponse<String>> future = client.sendAsync(request, HttpResponse.BodyHandlers.ofString());
return future.thenApply(response -> {
if (response.statusCode() != HttpURLConnection.HTTP_OK) {
throw new RuntimeCamelException("Cannot compensate LRA");
}
return null;
});
} | @DisplayName("Tests whether LRAClient is calling prepareRequest with exchange from compensate()")
@Test
void testCallsPrepareRequestWithExchangeInCompensate() throws MalformedURLException {
LRASagaService sagaService = new LRASagaService();
applyMockProperties(sagaService);
LRAClient client = new LRAClient(sagaService) {
protected HttpRequest.Builder prepareRequest(URI uri, Exchange exchange) {
throw new ExchangeRuntimeException(exchange);
}
};
Exchange exchange = Mockito.mock(Exchange.class);
Assertions.assertThrows(ExchangeRuntimeException.class,
() -> client.compensate(new URL("https://localhost/saga"), exchange));
} |
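ExchangeRuntimeException is a test helper that is not shown; a hypothetical minimal definition that carries the exchange out of the overridden prepareRequest so the test can assert it was passed through:
static class ExchangeRuntimeException extends RuntimeException {
    final Exchange exchange;
    ExchangeRuntimeException(Exchange exchange) {
        this.exchange = exchange; // kept so assertions could inspect which exchange arrived
    }
}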
public void cacheSelectData(final SelectorData selectorData) {
Optional.ofNullable(selectorData).ifPresent(this::selectorAccept);
} | @Test
public void testCacheSelectData() throws NoSuchFieldException, IllegalAccessException {
SelectorData firstCachedSelectorData = SelectorData.builder().id("1").pluginName(mockPluginName1).sort(1).build();
BaseDataCache.getInstance().cacheSelectData(firstCachedSelectorData);
ConcurrentHashMap<String, List<SelectorData>> selectorMap = getFieldByName(selectorMapStr);
assertEquals(Lists.newArrayList(firstCachedSelectorData), selectorMap.get(mockPluginName1));
SelectorData secondCachedSelectorData = SelectorData.builder().id("2").pluginName(mockPluginName1).sort(2).build();
BaseDataCache.getInstance().cacheSelectData(secondCachedSelectorData);
assertEquals(Lists.newArrayList(firstCachedSelectorData, secondCachedSelectorData), selectorMap.get(mockPluginName1));
} |
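The getFieldByName(selectorMapStr) helper is not shown; it presumably reflects the private selector map out of the BaseDataCache singleton. A hedged sketch:
@SuppressWarnings("unchecked")
private ConcurrentHashMap<String, List<SelectorData>> getFieldByName(String fieldName)
        throws NoSuchFieldException, IllegalAccessException {
    java.lang.reflect.Field field = BaseDataCache.class.getDeclaredField(fieldName);
    field.setAccessible(true);
    return (ConcurrentHashMap<String, List<SelectorData>>) field.get(BaseDataCache.getInstance());
}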
public void loadAccessData() {
getControls();
pushCache();
Object key = connectionBox.getSelectedItem();
// Nothing selected yet...
if ( key == null ) {
key = connectionMap.firstKey();
connectionBox.setSelectedItem( key );
return;
}
DatabaseInterface database = connectionMap.get( key );
int[] acc = database.getAccessTypeList();
Object accessKey = accessBox.getSelectedItem();
accessBox.removeItems();
// Add those access types applicable to this connection type
for ( int value : acc ) {
accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) );
}
// HACK: Need to force the height of the list control, as it does not behave
// well when using relative layouts
accessBox.setRows( accessBox.getRows() );
// May not exist for this connection type.
if ( accessKey != null ) { // This check keeps the SwtListbox from complaining about a null value
accessBox.setSelectedItem( accessKey );
}
// Last resort, set first as default
if ( accessBox.getSelectedItem() == null ) {
accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[ 0 ] ) );
}
Map<String, String> options = null;
if ( this.databaseMeta != null ) {
// Apply defaults to meta if set (only current db type will be displayed)
this.databaseMeta.applyDefaultOptions( database );
options = this.databaseMeta.getExtraOptions();
} else {
// Otherwise clear and display defaults directly
clearOptionsData();
options = database.getDefaultOptions();
}
setOptionsData( options );
PartitionDatabaseMeta[] clusterInfo = null;
if ( this.databaseMeta != null ) {
clusterInfo = this.databaseMeta.getPartitioningInformation();
}
setClusterData( clusterInfo );
popCache();
} | @Test
public void testLoadAccessData() throws Exception {
when( accessBox.getSelectedItem() ).thenReturn( "Native" );
DatabaseInterface dbInterface = mock( DatabaseInterface.class );
when( dbInterface.getDefaultDatabasePort() ).thenReturn( 5309 );
DataHandler.connectionMap.put( "myDb", dbInterface );
dataHandler.loadAccessData();
// Should immediately return if called again since the connectionBox will have been loaded
dataHandler.loadAccessData();
} |