focal_method | test_case
---|---
static <T> Set<T> findMissing(Set<T> toFind, Set<T> toSearch) {
Set<T> ret = new LinkedHashSet<>();
for (T toFindItem: toFind) {
if (!toSearch.contains(toFindItem)) {
ret.add(toFindItem);
}
}
return ret;
} | @Test
public void testFindMissing() {
TopicPartition foo0 = new TopicPartition("foo", 0);
TopicPartition foo1 = new TopicPartition("foo", 1);
TopicPartition bar0 = new TopicPartition("bar", 0);
TopicPartition bar1 = new TopicPartition("bar", 1);
TopicPartition baz0 = new TopicPartition("baz", 0);
TopicPartition baz1 = new TopicPartition("baz", 1);
assertEquals(toSet(), FetchSessionHandler.findMissing(toSet(foo0), toSet(foo0)));
assertEquals(toSet(foo0), FetchSessionHandler.findMissing(toSet(foo0), toSet(foo1)));
assertEquals(toSet(foo0, foo1),
FetchSessionHandler.findMissing(toSet(foo0, foo1), toSet(baz0)));
assertEquals(toSet(bar1, foo0, foo1),
FetchSessionHandler.findMissing(toSet(foo0, foo1, bar0, bar1),
toSet(bar0, baz0, baz1)));
assertEquals(toSet(),
FetchSessionHandler.findMissing(toSet(foo0, foo1, bar0, bar1, baz1),
toSet(foo0, foo1, bar0, bar1, baz0, baz1)));
} |
public static <T> RetryTransformer<T> of(Retry retry) {
return new RetryTransformer<>(retry);
} | @Test
public void retryOnResultUsingSingle() throws InterruptedException {
RetryConfig config = RetryConfig.<String>custom()
.retryOnResult("retry"::equals)
.waitDuration(Duration.ofMillis(50))
.maxAttempts(3).build();
Retry retry = Retry.of("testName", config);
given(helloWorldService.returnHelloWorld())
.willReturn("retry")
.willReturn("success");
Single.fromCallable(helloWorldService::returnHelloWorld)
.compose(RetryTransformer.of(retry))
.test()
.await()
.assertValueCount(1)
.assertValue("success")
.assertComplete();
then(helloWorldService).should(times(2)).returnHelloWorld();
Retry.Metrics metrics = retry.getMetrics();
assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isEqualTo(1);
} |
public static byte[] zlib(String content, String charset, int level) {
return zlib(StrUtil.bytes(content, charset), level);
} | @Test
public void zlibTest() {
final String data = "我是一个需要压缩的很长很长的字符串";
final byte[] bytes = StrUtil.utf8Bytes(data);
byte[] gzip = ZipUtil.zlib(bytes, 0);
//保证zlib长度正常
assertEquals(62, gzip.length);
final byte[] unGzip = ZipUtil.unZlib(gzip);
//保证正常还原
assertEquals(data, StrUtil.utf8Str(unGzip));
gzip = ZipUtil.zlib(bytes, 9);
//保证zlib长度正常
assertEquals(56, gzip.length);
final byte[] unGzip2 = ZipUtil.unZlib(gzip);
//保证正常还原
assertEquals(data, StrUtil.utf8Str(unGzip2));
} |
@Override
public boolean archive(String fileName, byte[] data) {
checkArgument(!Strings.isNullOrEmpty(fileName));
checkNotNull(data);
try {
logger.atInfo().log("Archiving data to file system with filename '%s'.", fileName);
Files.asByteSink(new File(fileName)).write(data);
return true;
} catch (IOException e) {
logger.atWarning().withCause(e).log("Failed archiving data to file '%s'.", fileName);
return false;
}
} | @Test
public void archive_whenValidTargetFileAndByteArrayData_archivesGivenDataWithGivenName()
throws IOException {
File tempFile = temporaryFolder.newFile();
byte[] data = newPreFilledByteArray(200);
RawFileArchiver rawFileArchiver = new RawFileArchiver();
assertThat(rawFileArchiver.archive(tempFile.getAbsolutePath(), data)).isTrue();
assertThat(Files.toByteArray(tempFile)).isEqualTo(data);
} |
@Override
public long getDictDataCountByDictType(String dictType) {
return dictDataMapper.selectCountByDictType(dictType);
} | @Test
public void testGetDictDataCountByDictType() {
// mock data
dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("yunai")));
dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("tudou")));
dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("yunai")));
// prepare parameters
String dictType = "yunai";
// invoke
long count = dictDataService.getDictDataCountByDictType(dictType);
// verify
assertEquals(2L, count);
} |
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
if (StringUtils.isEmpty(spelExpression)) {
return spelExpression;
}
if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
return stringValueResolver.resolveStringValue(spelExpression);
}
if (spelExpression.matches(METHOD_SPEL_REGEX)) {
SpelRootObject rootObject = new SpelRootObject(method, arguments);
MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
return (String) evaluated;
}
if (spelExpression.matches(BEAN_SPEL_REGEX)) {
SpelRootObject rootObject = new SpelRootObject(method, arguments);
MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
return (String) evaluated;
}
return spelExpression;
} | @Test
public void placeholderSpelTest2() throws Exception {
String testExpression = "${property:default}";
DefaultSpelResolverTest target = new DefaultSpelResolverTest();
Method testMethod = target.getClass().getMethod("testMethod", String.class);
String result = sut.resolve(testMethod, new Object[]{}, testExpression);
assertThat(result).isEqualTo("backend");
} |
@Override
public void close() {
close(Duration.ofMillis(DEFAULT_CLOSE_TIMEOUT_MS));
} | @Test
public void testVerifyApplicationEventOnShutdown() {
consumer = newConsumer();
completeUnsubscribeApplicationEventSuccessfully();
doReturn(null).when(applicationEventHandler).addAndGet(any());
consumer.close();
verify(applicationEventHandler).add(any(UnsubscribeEvent.class));
verify(applicationEventHandler).add(any(CommitOnCloseEvent.class));
} |
@Override
public AuthenticationToken authenticate(HttpServletRequest request,
final HttpServletResponse response)
throws IOException, AuthenticationException {
// If the request servlet path is in the whitelist,
// skip Kerberos authentication and return anonymous token.
final String path = request.getServletPath();
for(final String endpoint: whitelist) {
if (endpoint.equals(path)) {
return AuthenticationToken.ANONYMOUS;
}
}
AuthenticationToken token = null;
String authorization = request.getHeader(
KerberosAuthenticator.AUTHORIZATION);
if (authorization == null
|| !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
response.setHeader(WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
if (authorization == null) {
LOG.trace("SPNEGO starting for url: {}", request.getRequestURL());
} else {
LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION +
"' does not start with '" +
KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
}
} else {
authorization = authorization.substring(
KerberosAuthenticator.NEGOTIATE.length()).trim();
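// commons-codec Base64 with line length 0: encode/decode without chunking or line breaks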
final Base64 base64 = new Base64(0);
final byte[] clientToken = base64.decode(authorization);
try {
final String serverPrincipal =
KerberosUtil.getTokenServerName(clientToken);
if (!serverPrincipal.startsWith("HTTP/")) {
throw new IllegalArgumentException(
"Invalid server principal " + serverPrincipal +
"decoded from client request");
}
token = Subject.doAs(serverSubject,
new PrivilegedExceptionAction<AuthenticationToken>() {
@Override
public AuthenticationToken run() throws Exception {
return runWithPrincipal(serverPrincipal, clientToken,
base64, response);
}
});
} catch (PrivilegedActionException ex) {
if (ex.getException() instanceof IOException) {
throw (IOException) ex.getException();
} else {
throw new AuthenticationException(ex.getException());
}
} catch (Exception ex) {
throw new AuthenticationException(ex);
}
}
return token;
} | @Test
public void testRequestWithInvalidAuthorization() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION))
.thenReturn("invalid");
Assert.assertNull(handler.authenticate(request, response));
Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
KerberosAuthenticator.NEGOTIATE);
Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
} |
public static Builder builder(String bucket, String testClassName, Credentials credentials) {
checkArgument(!bucket.equals(""));
checkArgument(!testClassName.equals(""));
return new Builder(bucket, testClassName, credentials);
} | @Test
public void testBuilderWithEmptyTestClassName() {
assertThrows(
IllegalArgumentException.class, () -> GcsResourceManager.builder(BUCKET, "", null).build());
} |
public TopicList getAllTopicList() {
TopicList topicList = new TopicList();
try {
this.lock.readLock().lockInterruptibly();
topicList.getTopicList().addAll(this.topicQueueTable.keySet());
} catch (Exception e) {
log.error("getAllTopicList Exception", e);
} finally {
this.lock.readLock().unlock();
}
return topicList;
} | @Test
public void testGetAllTopicList() {
byte[] topicInfo = routeInfoManager.getAllTopicList().encode();
assertThat(topicInfo).isNotNull();
} |
public void createOrUpdate(final String key, final String value, final CreateMode mode) {
String val = StringUtils.isEmpty(value) ? "" : value;
try {
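// Curator's create().orSetData() creates the node, or overwrites its data if it already exists,
// creating any missing parent nodes along the way.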
client.create().orSetData().creatingParentsIfNeeded().withMode(mode).forPath(key, val.getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
throw new ShenyuException(e);
}
} | @Test
void createOrUpdate() throws Exception {
assertThrows(ShenyuException.class, () ->
client.createOrUpdate("/test", "hello", CreateMode.PERSISTENT));
CreateBuilder createBuilder = mock(CreateBuilder.class);
when(curatorFramework.create()).thenReturn(createBuilder);
CreateBuilder2 createBuilder2 = mock(CreateBuilder2.class);
when(createBuilder.orSetData()).thenReturn(createBuilder2);
ProtectACLCreateModeStatPathAndBytesable protectACLCreateModeStatPathAndBytesable = mock(ProtectACLCreateModeStatPathAndBytesable.class);
when(createBuilder2.creatingParentsIfNeeded()).thenReturn(protectACLCreateModeStatPathAndBytesable);
ACLBackgroundPathAndBytesable pathAndBytesable = mock(ACLBackgroundPathAndBytesable.class);
when(protectACLCreateModeStatPathAndBytesable.withMode(any())).thenReturn(pathAndBytesable);
when(pathAndBytesable.forPath(anyString(), any(byte[].class))).thenReturn(null);
client.createOrUpdate("/test", "hello", CreateMode.PERSISTENT);
client.createOrUpdate("", "hello", CreateMode.PERSISTENT);
client.createOrUpdate("", (Object) null, CreateMode.PERSISTENT);
client.createOrUpdate("", new Object(), CreateMode.PERSISTENT);
} |
public MatchIterator getEqualOrdinals(int fromOrdinal) {
int hashCode = HashCodes.hashInt(fromOrdinal);
int bucket = hashCode & (fromOrdinalsMap.length - 1);
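// Linear probing over an open-addressed table. Each occupied slot packs the key
// (fromOrdinal) into the low 32 bits; the high bits hold either a single matching
// ordinal or, when the sign bit is set, an offset into a pivot table of multiple
// matches. Empty slots hold -1.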
while(fromOrdinalsMap[bucket] != -1L) {
if((int)fromOrdinalsMap[bucket] == fromOrdinal) {
if((fromOrdinalsMap[bucket] & Long.MIN_VALUE) != 0L)
return new PivotedMatchIterator((int)((fromOrdinalsMap[bucket] & Long.MAX_VALUE) >> 32));
return new SingleMatchIterator((int)(fromOrdinalsMap[bucket] >> 32));
}
bucket = (bucket + 1) & (fromOrdinalsMap.length - 1);
}
return EmptyMatchIterator.INSTANCE;
} | @Test
public void testFromOrdinals() {
assertMatchIterator(map.getEqualOrdinals(1), 1, 2, 3);
assertMatchIterator(map.getEqualOrdinals(2), 4, 5, 6);
assertMatchIterator(map.getEqualOrdinals(3), 7, 8, 9);
assertMatchIterator(map.getEqualOrdinals(100), 5025);
assertMatchIterator(map.getEqualOrdinals(200));
} |
@Override
public String toString() {
return "ChildEipStatistic{" +
"id='" + id + '\'' +
", eipStatisticMap=" + eipStatisticMap +
'}';
} | @Test
public void testToString() {
String toString = getInstance().toString();
assertNotNull(toString);
assertTrue(toString.contains("ChildEipStatistic"));
} |
@Override
public void validTenant(Long id) {
TenantDO tenant = getTenant(id);
if (tenant == null) {
throw exception(TENANT_NOT_EXISTS);
}
if (tenant.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
throw exception(TENANT_DISABLE, tenant.getName());
}
if (DateUtils.isExpired(tenant.getExpireTime())) {
throw exception(TENANT_EXPIRE, tenant.getName());
}
} | @Test
public void testValidTenant_notExists() {
assertServiceException(() -> tenantService.validTenant(randomLongId()), TENANT_NOT_EXISTS);
} |
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
final P4PipelineModel other = (P4PipelineModel) obj;
return Objects.equals(this.tables, other.tables)
&& Objects.equals(this.counters, other.counters)
&& Objects.equals(this.meters, other.meters)
&& Objects.equals(this.registers, other.registers)
&& Objects.equals(this.actionProfiles, other.actionProfiles)
&& Objects.equals(this.packetOperations, other.packetOperations)
&& this.fingerprint == other.fingerprint;
} | @Test
public void testEquals() {
new EqualsTester()
.addEqualityGroup(P4_PIPELINE_MODEL_1, SAME_AS_P4_PIPELINE_MODEL_1)
.addEqualityGroup(P4_PIPELINE_MODEL_2)
.addEqualityGroup(P4_PIPELINE_MODEL_3)
.addEqualityGroup(P4_PIPELINE_MODEL_4)
.testEquals();
} |
@Override
public Image getImageResponseFromBody(String responseBody) {
return new ImageDeserializer().fromJSON(responseBody);
} | @Test
public void shouldDeserializeImageFromJson() throws Exception {
com.thoughtworks.go.plugin.domain.common.Image image = new ArtifactMessageConverterV2().getImageResponseFromBody("{\"content_type\":\"foo\", \"data\":\"bar\"}");
assertThat(image.getContentType(), is("foo"));
assertThat(image.getData(), is("bar"));
} |
public static long findAndVerifyWindowGrace(final GraphNode graphNode) {
return findAndVerifyWindowGrace(graphNode, "");
} | @Test
public void shouldExtractGraceFromKStreamSessionWindowAggregateNode() {
final SessionWindows windows = SessionWindows.ofInactivityGapAndGrace(ofMillis(10L), ofMillis(1234L));
final StatefulProcessorNode<String, Long> node = new StatefulProcessorNode<>(
"asdf",
new ProcessorParameters<>(
new KStreamSessionWindowAggregate<String, Long, Integer>(
windows,
"asdf",
EmitStrategy.onWindowUpdate(),
null,
null,
null
),
"asdf"
),
(StoreFactory) null
);
final long extracted = GraphGraceSearchUtil.findAndVerifyWindowGrace(node);
assertThat(extracted, is(windows.gracePeriodMs() + windows.inactivityGap()));
} |
public static byte[] getByteArraySlice(byte[] array, int begin, int end) {
byte[] slice = new byte[end - begin + 1];
System.arraycopy(array, begin, slice, 0, slice.length);
return slice;
} | @Test
public void testGetByteArraySlice() throws Exception {
assertArrayEquals(new byte[]{1, 2},
JOrphanUtils.getByteArraySlice(new byte[]{0, 1, 2, 3}, 1, 2));
} |
@Override
public Set<DisjointPath> getDisjointPaths(Topology topology, DeviceId src,
DeviceId dst) {
checkNotNull(src, DEVICE_ID_NULL);
checkNotNull(dst, DEVICE_ID_NULL);
return defaultTopology(topology).getDisjointPaths(src, dst);
} | @Test
public void testGetDisjointPaths() {
VirtualNetwork virtualNetwork = setupVirtualNetworkTopology();
TopologyService topologyService = manager.get(virtualNetwork.id(), TopologyService.class);
Topology topology = topologyService.currentTopology();
VirtualDevice srcVirtualDevice = getVirtualDevice(virtualNetwork.id(), DID1);
VirtualDevice dstVirtualDevice = getVirtualDevice(virtualNetwork.id(), DID2);
// test the getDisjointPaths() method.
Set<DisjointPath> paths = topologyService.getDisjointPaths(topology, srcVirtualDevice.id(),
dstVirtualDevice.id());
assertNotNull("The paths should not be null.", paths);
assertEquals("The paths size did not match.", 1, paths.size());
// test the getDisjointPaths() method using a weight.
LinkWeigher weight = new LinkWeigherAdapter(1.0);
Set<DisjointPath> paths1 = topologyService.getDisjointPaths(topology, srcVirtualDevice.id(),
dstVirtualDevice.id(), weight);
assertNotNull("The paths should not be null.", paths1);
assertEquals("The paths size did not match.", 1, paths1.size());
} |
@Override
protected void parse(final ProtocolFactory protocols, final Local folder) throws AccessDeniedException {
for(Local f : folder.list().filter(new NullFilter<Local>() {
@Override
public boolean accept(Local file) {
if(file.isFile()) {
return "favoriteMetadata".equals(file.getExtension());
}
return false;
}
})) {
this.read(protocols, f);
}
} | @Test
public void testParse() throws AccessDeniedException {
Transmit5BookmarkCollection c = new Transmit5BookmarkCollection();
assertEquals(0, c.size());
c.parse(new ProtocolFactory(new HashSet<>(Collections.singletonList(new TestProtocol(Scheme.sftp)))), new Local("src/test/resources/"));
assertEquals(1, c.size());
} |
public static boolean isNumber(CharSequence value) {
return NumberUtil.isNumber(value);
} | @Test
public void isNumberTest() {
assertTrue(Validator.isNumber("45345365465"));
assertTrue(Validator.isNumber("0004545435"));
assertTrue(Validator.isNumber("5.222"));
assertTrue(Validator.isNumber("0.33323"));
} |
public String getProperty(String name) {
return getProperty(name, true);
} | @Test
public void testGetProperty() throws Exception {
XMLProperties props = XMLProperties.getNonPersistedInstance(Objects.requireNonNull(getClass().getResourceAsStream("XMLProperties.test01.xml")));
assertEquals("123", props.getProperty("foo.bar"));
assertEquals("456", props.getProperty("foo.bar.baz"));
assertNull(props.getProperty("foo"));
assertNull(props.getProperty("nothing.something"));
} |
int parseAndConvert(String[] args) throws Exception {
Options opts = createOptions();
int retVal = 0;
try {
if (args.length == 0) {
LOG.info("Missing command line arguments");
printHelp(opts);
return 0;
}
CommandLine cliParser = new GnuParser().parse(opts, args);
if (cliParser.hasOption(CliOption.HELP.shortSwitch)) {
printHelp(opts);
return 0;
}
FSConfigToCSConfigConverter converter =
prepareAndGetConverter(cliParser);
converter.convert(converterParams);
String outputDir = converterParams.getOutputDirectory();
boolean skipVerification =
cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch);
if (outputDir != null && !skipVerification) {
validator.validateConvertedConfig(
converterParams.getOutputDirectory());
}
} catch (ParseException e) {
String msg = "Options parsing failed: " + e.getMessage();
logAndStdErr(e, msg);
printHelp(opts);
retVal = -1;
} catch (PreconditionException e) {
String msg = "Cannot start FS config conversion due to the following"
+ " precondition error: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (UnsupportedPropertyException e) {
String msg = "Unsupported property/setting encountered during FS config "
+ "conversion: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (ConversionException | IllegalArgumentException e) {
String msg = "Fatal error during FS config conversion: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (VerificationException e) {
Throwable cause = e.getCause();
String msg = "Verification failed: " + e.getCause().getMessage();
conversionOptions.handleVerificationFailure(cause, msg);
retVal = -1;
}
conversionOptions.handleParsingFinished();
return retVal;
} | @Test
public void testEnabledAsyncScheduling() throws Exception {
setupFSConfigConversionFiles(true);
FSConfigToCSConfigArgumentHandler argumentHandler =
new FSConfigToCSConfigArgumentHandler(conversionOptions, mockValidator);
String[] args = getArgumentsAsArrayWithDefaults("-f",
FSConfigConverterTestCommons.FS_ALLOC_FILE, "-p",
"-a");
argumentHandler.parseAndConvert(args);
assertTrue("-a switch had no effect",
conversionOptions.isEnableAsyncScheduler());
} |
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
super.onDataReceived(device, data);
if (data.size() < 3) {
onInvalidDataReceived(device, data);
return;
}
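// Response layout: [0] response op code, [1] the request op code being answered, [2] status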
final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 0);
final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1);
final int status = data.getIntValue(Data.FORMAT_UINT8, 2);
if (responseCode != SC_OP_CODE_RESPONSE_CODE) {
onInvalidDataReceived(device, data);
return;
}
if (status != SC_RESPONSE_SUCCESS) {
onSCOperationError(device, requestCode, status);
return;
}
if (requestCode == SC_OP_CODE_REQUEST_SUPPORTED_SENSOR_LOCATIONS) {
final int size = data.size() - 3;
final int[] locations = new int[size];
for (int i = 0; i < size; ++i) {
locations[i] = data.getIntValue(Data.FORMAT_UINT8, 3 + i);
}
onSupportedSensorLocationsReceived(device, locations);
} else {
onSCOperationCompleted(device, requestCode);
}
} | @Test
public void onSCOperationCompleted() {
final MutableData data = new MutableData(new byte[] { 0x10, 0x01, 0x01});
response.onDataReceived(null, data);
assertTrue(success);
assertEquals(0, errorCode);
assertEquals(1, requestCode);
assertNull(locations);
} |
static OkHttpClient prepareOkHttpClient(OkHttpClient okHttpClient, WsRequest wsRequest) {
if (!wsRequest.getTimeOutInMs().isPresent() && !wsRequest.getWriteTimeOutInMs().isPresent()) {
return okHttpClient;
}
OkHttpClient.Builder builder = okHttpClient.newBuilder();
if (wsRequest.getTimeOutInMs().isPresent()) {
builder.readTimeout(wsRequest.getTimeOutInMs().getAsInt(), TimeUnit.MILLISECONDS);
}
if (wsRequest.getWriteTimeOutInMs().isPresent()) {
builder.writeTimeout(wsRequest.getWriteTimeOutInMs().getAsInt(), TimeUnit.MILLISECONDS);
}
return builder.build();
} | @Test
public void override_timeouts_with_request() {
OkHttpClient client = new OkHttpClient.Builder().build();
WsRequest request = new PostRequest("abc").setWriteTimeOutInMs(123).setTimeOutInMs(234);
client = underTest.prepareOkHttpClient(client, request);
assertThat(client.writeTimeoutMillis()).isEqualTo(123);
assertThat(client.readTimeoutMillis()).isEqualTo(234);
} |
@Override
public boolean isRepeatable() {
return httpAsyncRequestProducer.isRepeatable();
} | @Test
public void isRepeatable() {
final HttpAsyncRequestProducer delegate = Mockito.mock(HttpAsyncRequestProducer.class);
final HttpAsyncRequestProducerDecorator decorator = new HttpAsyncRequestProducerDecorator(
delegate, null, null);
decorator.isRepeatable();
Mockito.verify(delegate, Mockito.times(1)).isRepeatable();
} |
@Override
public int compare(Event a, Event b) {
return eventOrder.compare(a, b);
} | @Test
void verifyTestSourceReadSortedCorrectly() {
assertAll(
() -> assertThat(comparator.compare(testRead, runStarted), greaterThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, testRead), equalTo(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, testParsed), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, suggested), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, feature1Case1Started), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, feature1Case2Started), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, feature1Case3Started), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, feature2Case1Started), lessThan(EQUAL_TO)),
() -> assertThat(comparator.compare(testRead, runFinished), lessThan(EQUAL_TO)));
} |
public static Set<SchemaChangeEventType> resolveSchemaEvolutionOptions(
List<String> includedSchemaEvolutionTypes, List<String> excludedSchemaEvolutionTypes) {
List<SchemaChangeEventType> resultTypes = new ArrayList<>();
if (includedSchemaEvolutionTypes.isEmpty()) {
resultTypes.addAll(Arrays.asList(SchemaChangeEventTypeFamily.ALL));
} else {
for (String includeTag : includedSchemaEvolutionTypes) {
resultTypes.addAll(resolveSchemaEvolutionTag(includeTag));
}
}
for (String excludeTag : excludedSchemaEvolutionTypes) {
resultTypes.removeAll(resolveSchemaEvolutionTag(excludeTag));
}
return new HashSet<>(resultTypes);
} | @Test
public void testResolveSchemaEvolutionOptions() {
assertThat(
ChangeEventUtils.resolveSchemaEvolutionOptions(
Collections.emptyList(), Collections.emptyList()))
.isEqualTo(
Sets.set(
TRUNCATE_TABLE,
RENAME_COLUMN,
CREATE_TABLE,
DROP_TABLE,
ALTER_COLUMN_TYPE,
ADD_COLUMN,
DROP_COLUMN));
assertThat(
ChangeEventUtils.resolveSchemaEvolutionOptions(
Collections.emptyList(), Collections.singletonList("drop")))
.isEqualTo(
Sets.set(
ADD_COLUMN,
ALTER_COLUMN_TYPE,
RENAME_COLUMN,
CREATE_TABLE,
TRUNCATE_TABLE));
assertThat(
ChangeEventUtils.resolveSchemaEvolutionOptions(
Arrays.asList("create", "add"), Collections.emptyList()))
.isEqualTo(Sets.set(ADD_COLUMN, CREATE_TABLE));
assertThat(
ChangeEventUtils.resolveSchemaEvolutionOptions(
Collections.singletonList("column"),
Collections.singletonList("drop.column")))
.isEqualTo(Sets.set(ADD_COLUMN, ALTER_COLUMN_TYPE, RENAME_COLUMN));
assertThat(
ChangeEventUtils.resolveSchemaEvolutionOptions(
Collections.emptyList(), Collections.singletonList("drop.column")))
.isEqualTo(
Sets.set(
ADD_COLUMN,
DROP_TABLE,
TRUNCATE_TABLE,
RENAME_COLUMN,
ALTER_COLUMN_TYPE,
CREATE_TABLE));
} |
protected static void validateTimestampColumnType(
final Optional<String> timestampColumnName,
final Schema avroSchema
) {
if (timestampColumnName.isPresent()) {
if (avroSchema.getField(timestampColumnName.get()) == null) {
throw new IllegalArgumentException("The indicated timestamp field does not exist: "
+ timestampColumnName.get());
}
if (avroSchema.getField(timestampColumnName.get()).schema().getType() != Type.LONG) {
throw new IllegalArgumentException("The timestamp column type should be bigint/long. "
+ timestampColumnName.get() + " type is "
+ avroSchema.getField(timestampColumnName.get()).schema().getType());
}
}
} | @Test
public void shouldThrowIfTimestampColumnTypeNotLong() throws IOException {
// When
final IllegalArgumentException illegalArgumentException = assertThrows(
IllegalArgumentException.class,
() -> DataGenProducer.validateTimestampColumnType(Optional.of("pageid"), getAvroSchema())
);
// Then
assertThat(illegalArgumentException.getMessage(),
CoreMatchers.equalTo("The timestamp column type should be bigint/long. pageid type is STRING") );
} |
@Override
protected List<DavResource> list(final Path directory) throws IOException {
return session.getClient().list(new DAVPathEncoder().encode(directory), 1,
Stream.of(
NextcloudAttributesFinderFeature.OC_FILEID_CUSTOM_NAMESPACE,
NextcloudAttributesFinderFeature.OC_CHECKSUMS_CUSTOM_NAMESPACE,
NextcloudAttributesFinderFeature.OC_SIZE_CUSTOM_NAMESPACE,
DAVTimestampFeature.LAST_MODIFIED_CUSTOM_NAMESPACE,
DAVTimestampFeature.LAST_MODIFIED_SERVER_CUSTOM_NAMESPACE).
collect(Collectors.toSet()));
} | @Test
public void testList() throws Exception {
final Path home = new DefaultHomeFinderService(session).find();
final Path directory = new DAVDirectoryFeature(session, new NextcloudAttributesFinderFeature(session)).mkdir(new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
final PathAttributes directoryAttributes = new DAVAttributesFinderFeature(session).find(directory);
final String folderEtag = directoryAttributes.getETag();
final long folderTimestamp = directoryAttributes.getModificationDate();
final Path test = new DAVTouchFeature(new NextcloudWriteFeature(session)).touch(new Path(directory,
new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
assertEquals(Protocol.DirectoryTimestamp.implicit, session.getHost().getProtocol().getDirectoryTimestamp());
assertNotEquals(folderTimestamp, new DAVAttributesFinderFeature(session).find(directory).getModificationDate());
assertNotEquals(folderEtag, new DAVAttributesFinderFeature(session).find(directory).getETag());
try {
final AttributedList<Path> list = new NextcloudListService(session).list(directory,
new DisabledListProgressListener());
assertEquals(1, list.size());
assertNotNull(list.find(new SimplePathPredicate(test)));
assertNotNull(list.find(new SimplePathPredicate(test)).attributes().getFileId());
}
finally {
new DAVDeleteFeature(session).delete(Arrays.asList(test, directory), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
} |
@SuppressWarnings("MethodLength")
static void dissectControlRequest(
final ArchiveEventCode eventCode,
final MutableDirectBuffer buffer,
final int offset,
final StringBuilder builder)
{
int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
HEADER_DECODER.wrap(buffer, offset + encodedLength);
encodedLength += MessageHeaderDecoder.ENCODED_LENGTH;
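// For each command, wrap the corresponding SBE flyweight decoder over the payload
// that follows the message header, then append a human-readable rendering.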
switch (eventCode)
{
case CMD_IN_CONNECT:
CONNECT_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendConnect(builder);
break;
case CMD_IN_CLOSE_SESSION:
CLOSE_SESSION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendCloseSession(builder);
break;
case CMD_IN_START_RECORDING:
START_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartRecording(builder);
break;
case CMD_IN_STOP_RECORDING:
STOP_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecording(builder);
break;
case CMD_IN_REPLAY:
REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplay(builder);
break;
case CMD_IN_STOP_REPLAY:
STOP_REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopReplay(builder);
break;
case CMD_IN_LIST_RECORDINGS:
LIST_RECORDINGS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordings(builder);
break;
case CMD_IN_LIST_RECORDINGS_FOR_URI:
LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordingsForUri(builder);
break;
case CMD_IN_LIST_RECORDING:
LIST_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecording(builder);
break;
case CMD_IN_EXTEND_RECORDING:
EXTEND_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendExtendRecording(builder);
break;
case CMD_IN_RECORDING_POSITION:
RECORDING_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendRecordingPosition(builder);
break;
case CMD_IN_TRUNCATE_RECORDING:
TRUNCATE_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendTruncateRecording(builder);
break;
case CMD_IN_STOP_RECORDING_SUBSCRIPTION:
STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecordingSubscription(builder);
break;
case CMD_IN_STOP_POSITION:
STOP_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopPosition(builder);
break;
case CMD_IN_FIND_LAST_MATCHING_RECORD:
FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendFindLastMatchingRecord(builder);
break;
case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS:
LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendListRecordingSubscriptions(builder);
break;
case CMD_IN_START_BOUNDED_REPLAY:
BOUNDED_REPLAY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartBoundedReplay(builder);
break;
case CMD_IN_STOP_ALL_REPLAYS:
STOP_ALL_REPLAYS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopAllReplays(builder);
break;
case CMD_IN_REPLICATE:
REPLICATE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplicate(builder);
break;
case CMD_IN_STOP_REPLICATION:
STOP_REPLICATION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopReplication(builder);
break;
case CMD_IN_START_POSITION:
START_POSITION_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartPosition(builder);
break;
case CMD_IN_DETACH_SEGMENTS:
DETACH_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendDetachSegments(builder);
break;
case CMD_IN_DELETE_DETACHED_SEGMENTS:
DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendDeleteDetachedSegments(builder);
break;
case CMD_IN_PURGE_SEGMENTS:
PURGE_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendPurgeSegments(builder);
break;
case CMD_IN_ATTACH_SEGMENTS:
ATTACH_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendAttachSegments(builder);
break;
case CMD_IN_MIGRATE_SEGMENTS:
MIGRATE_SEGMENTS_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendMigrateSegments(builder);
break;
case CMD_IN_AUTH_CONNECT:
AUTH_CONNECT_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendAuthConnect(builder);
break;
case CMD_IN_KEEP_ALIVE:
KEEP_ALIVE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendKeepAlive(builder);
break;
case CMD_IN_TAGGED_REPLICATE:
TAGGED_REPLICATE_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendTaggedReplicate(builder);
break;
case CMD_IN_START_RECORDING2:
START_RECORDING_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStartRecording2(builder);
break;
case CMD_IN_EXTEND_RECORDING2:
EXTEND_RECORDING_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendExtendRecording2(builder);
break;
case CMD_IN_STOP_RECORDING_BY_IDENTITY:
STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendStopRecordingByIdentity(builder);
break;
case CMD_IN_PURGE_RECORDING:
PURGE_RECORDING_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendPurgeRecording(builder);
break;
case CMD_IN_REPLICATE2:
REPLICATE_REQUEST2_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplicate2(builder);
break;
case CMD_IN_REQUEST_REPLAY_TOKEN:
REPLAY_TOKEN_REQUEST_DECODER.wrap(
buffer,
offset + encodedLength,
HEADER_DECODER.blockLength(),
HEADER_DECODER.version());
appendReplayToken(builder);
break;
default:
builder.append(": unknown command");
}
} | @Test
void controlRequestStopRecording()
{
internalEncodeLogHeader(buffer, 0, 32, 64, () -> 5_600_000_000L);
final StopRecordingRequestEncoder requestEncoder = new StopRecordingRequestEncoder();
requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
.controlSessionId(5)
.correlationId(42)
.streamId(7)
.channel("bar");
dissectControlRequest(CMD_IN_STOP_RECORDING, buffer, 0, builder);
assertEquals("[5.600000000] " + CONTEXT + ": " + CMD_IN_STOP_RECORDING.name() + " [32/64]:" +
" controlSessionId=5" +
" correlationId=42" +
" streamId=7" +
" channel=bar",
builder.toString());
} |
@Override
public boolean retryRequest(IOException exception, int executionCount, HttpContext ctx) {
log.fine(() -> String.format("retryRequest(exception='%s', executionCount='%d', ctx='%s'",
exception.getClass().getName(), executionCount, ctx));
HttpClientContext clientCtx = HttpClientContext.adapt(ctx);
if (!predicate.test(exception, clientCtx)) {
log.fine(() -> String.format("Not retrying for '%s'", ctx));
return false;
}
if (executionCount > maxRetries) {
log.fine(() -> String.format("Max retries exceeded for '%s'", ctx));
retryFailedConsumer.onRetryFailed(exception, executionCount, clientCtx);
return false;
}
Duration delay = delaySupplier.getDelay(executionCount);
log.fine(() -> String.format("Retrying after %s for '%s'", delay, ctx));
retryConsumer.onRetry(exception, delay, executionCount, clientCtx);
sleeper.sleep(delay);
return true;
} | @Test
void retry_with_exponential_backoff_sleeps_for_expected_durations() {
Sleeper sleeper = mock(Sleeper.class);
Duration startDelay = Duration.ofMillis(500);
Duration maxDelay = Duration.ofSeconds(5);
int maxRetries = 10;
DelayedConnectionLevelRetryHandler handler = DelayedConnectionLevelRetryHandler.Builder
.withExponentialBackoff(startDelay, maxDelay, maxRetries)
.withSleeper(sleeper)
.build();
IOException exception = new IOException();
HttpClientContext ctx = new HttpClientContext();
int lastExecutionCount = maxRetries + 1;
for (int i = 1; i <= lastExecutionCount; i++) {
handler.retryRequest(exception, i, ctx);
}
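// Exponential backoff doubles the delay each retry (500ms, 1s, 2s, 4s), then caps
// at maxDelay (5s) for the remaining six retries; the 11th attempt exceeds
// maxRetries and returns without sleeping.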
verify(sleeper).sleep(startDelay);
verify(sleeper).sleep(Duration.ofSeconds(1));
verify(sleeper).sleep(Duration.ofSeconds(2));
verify(sleeper).sleep(Duration.ofSeconds(4));
verify(sleeper, times(6)).sleep(Duration.ofSeconds(5));
} |
public URLNormalizer replaceIPWithDomainName() {
URL u = toURL();
if (!PATTERN_DOMAIN.matcher(u.getHost()).matches()) {
try {
InetAddress addr = InetAddress.getByName(u.getHost());
String host = addr.getHostName();
if (!u.getHost().equalsIgnoreCase(host)) {
url = url.replaceFirst(u.getHost(), host);
}
} catch (UnknownHostException e) {
logger.debug("Cannot resolve IP to host for :" + u.getHost(), e);
}
}
return this;
} | @Test
public void testReplaceIPWithDomainName() {
s = "http://208.80.154.224/wiki/Main_Page";
t = null;
Assert.assertTrue(
n(s).replaceIPWithDomainName().toString().contains("wikimedia"));
s = "http://wikipedia.org/wiki/Main_Page";
t = "http://wikipedia.org/wiki/Main_Page";
assertEquals(t, n(s).replaceIPWithDomainName().toString());
s = "http://200.200.200.200/nohost.html";
t = "http://200.200.200.200/nohost.html";
assertEquals(t, n(s).replaceIPWithDomainName().toString());
} |
@Override
public Optional<ComputationConfig> fetchConfig(String computationId) {
Preconditions.checkArgument(
!computationId.isEmpty(),
"computationId is empty. Cannot fetch computation config without a computationId.");
GetConfigResponse response =
applianceComputationConfigFetcher.fetchConfig(
GetConfigRequest.newBuilder().addComputations(computationId).build());
if (response == null) {
return Optional.empty();
}
for (Windmill.GetConfigResponse.SystemNameToComputationIdMapEntry entry :
response.getSystemNameToComputationIdMapList()) {
systemNameToComputationIdMap.put(entry.getSystemName(), entry.getComputationId());
}
return createComputationConfig(
// We are only fetching the config for 1 computation, so we should only be getting that
// computation back.
Iterables.getOnlyElement(response.getCloudWorksList()),
transformUserNameToStateFamilyByComputationId(response),
response.getNameMapList().stream()
.collect(toImmutableMap(NameMapEntry::getUserName, NameMapEntry::getSystemName)));
} | @Test
public void testGetComputationConfig_errorOnNoComputationConfig() {
StreamingApplianceComputationConfigFetcher configLoader =
createStreamingApplianceConfigLoader();
when(mockWindmillServer.getConfig(any()))
.thenReturn(Windmill.GetConfigResponse.newBuilder().build());
assertThrows(NoSuchElementException.class, () -> configLoader.fetchConfig("someComputationId"));
} |
@Override
public Acl getPermission(final Path file) throws BackgroundException {
try {
final Acl acl = new Acl();
if(containerService.isContainer(file)) {
final BucketAccessControls controls = session.getClient().bucketAccessControls().list(
containerService.getContainer(file).getName()).execute();
for(BucketAccessControl control : controls.getItems()) {
final String entity = control.getEntity();
acl.addAll(this.toUser(entity, control.getEmail()), new Acl.Role(control.getRole()));
}
}
else {
final ObjectAccessControls controls = session.getClient().objectAccessControls().list(containerService.getContainer(file).getName(), containerService.getKey(file)).execute();
for(ObjectAccessControl control : controls.getItems()) {
final String entity = control.getEntity();
acl.addAll(this.toUser(entity, control.getEmail()), this.toRole(control));
}
}
return acl;
}
catch(IOException e) {
final BackgroundException failure = new GoogleStorageExceptionMappingService().map("Failure to read attributes of {0}", e, file);
if(file.isDirectory()) {
if(failure instanceof NotfoundException) {
// No placeholder file may exist but we just have a common prefix
return Acl.EMPTY;
}
}
if(failure instanceof InteroperabilityException) {
// The specified method is not allowed against this resource. The case for delete markers in versioned buckets.
return Acl.EMPTY;
}
throw failure;
}
} | @Test
public void testReadBucketAnalyticsAcl() throws Exception {
final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory));
final GoogleStorageAccessControlListFeature f = new GoogleStorageAccessControlListFeature(session);
final Acl acl = f.getPermission(container);
assertTrue(acl.asList().stream().filter(user -> user.getUser().getIdentifier().equals("[email protected]")).findAny().isPresent());
assertFalse(acl.containsKey(new Acl.GroupUser(Acl.GroupUser.EVERYONE)));
} |
@Override
public Set<NodeHealth> readAll() {
long clusterTime = hzMember.getClusterTime();
long timeout = clusterTime - TIMEOUT_30_SECONDS;
Map<UUID, TimestampedNodeHealth> sqHealthState = readReplicatedMap();
Set<UUID> hzMemberUUIDs = hzMember.getMemberUuids();
Set<NodeHealth> existingNodeHealths = sqHealthState.entrySet().stream()
.filter(outOfDate(timeout))
.filter(ofNonExistentMember(hzMemberUUIDs))
.map(entry -> entry.getValue().getNodeHealth())
.collect(Collectors.toSet());
if (LOG.isTraceEnabled()) {
LOG.trace("Reading {} and keeping {}", new HashMap<>(sqHealthState), existingNodeHealths);
}
return ImmutableSet.copyOf(existingNodeHealths);
} | @Test
public void readAll_logs_message_for_each_timed_out_NodeHealth_ignored_if_TRACE() {
logging.setLevel(Level.TRACE);
Map<UUID, TimestampedNodeHealth> map = new HashMap<>();
UUID memberUuid1 = UUID.randomUUID();
UUID memberUuid2 = UUID.randomUUID();
map.put(memberUuid1, new TimestampedNodeHealth(randomNodeHealth(), clusterTime - 30 * 1000));
map.put(memberUuid2, new TimestampedNodeHealth(randomNodeHealth(), clusterTime - 30 * 1000));
doReturn(map).when(hazelcastMember).getReplicatedMap(MAP_SQ_HEALTH_STATE);
when(hazelcastMember.getMemberUuids()).thenReturn(ImmutableSet.of(memberUuid1, memberUuid2));
when(hazelcastMember.getClusterTime()).thenReturn(clusterTime);
underTest.readAll();
assertThat(logging.getLogs()).hasSize(3);
assertThat(logging.getLogs(Level.TRACE))
.containsOnly(
"Reading " + new HashMap<>(map) + " and keeping []",
"Ignoring NodeHealth of member " + memberUuid1 + " because it is too old",
"Ignoring NodeHealth of member " + memberUuid2 + " because it is too old");
} |
public static int appendToLabel(
final AtomicBuffer metaDataBuffer, final int counterId, final String value)
{
Objects.requireNonNull(metaDataBuffer);
if (counterId < 0)
{
throw new IllegalArgumentException("counter id " + counterId + " is negative");
}
final int maxCounterId = (metaDataBuffer.capacity() / CountersReader.METADATA_LENGTH) - 1;
if (counterId > maxCounterId)
{
throw new IllegalArgumentException(
"counter id " + counterId + " out of range: 0 - maxCounterId=" + maxCounterId);
}
final int counterMetaDataOffset = CountersReader.metaDataOffset(counterId);
final int state = metaDataBuffer.getIntVolatile(counterMetaDataOffset);
if (CountersReader.RECORD_ALLOCATED != state)
{
throw new IllegalArgumentException("counter id " + counterId + " is not allocated, state: " + state);
}
final int existingLabelLength = metaDataBuffer.getInt(counterMetaDataOffset + CountersReader.LABEL_OFFSET);
final int remainingLabelLength = CountersReader.MAX_LABEL_LENGTH - existingLabelLength;
final int writtenLength = metaDataBuffer.putStringWithoutLengthAscii(
counterMetaDataOffset + CountersReader.LABEL_OFFSET + SIZE_OF_INT + existingLabelLength,
value,
0,
remainingLabelLength);
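// Publish the new label length only after the appended bytes are written; the
// ordered put prevents readers from observing a length that covers unwritten bytes.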
if (writtenLength > 0)
{
metaDataBuffer.putIntOrdered(
counterMetaDataOffset + CountersReader.LABEL_OFFSET, existingLabelLength + writtenLength);
}
return writtenLength;
} | @Test
void appendToLabelShouldAddAPortionOfSuffixUpToTheMaxLength()
{
final CountersManager countersManager = new CountersManager(
new UnsafeBuffer(new byte[CountersReader.METADATA_LENGTH]),
new UnsafeBuffer(ByteBuffer.allocateDirect(CountersReader.COUNTER_LENGTH)),
StandardCharsets.US_ASCII);
final String initialLabel = "this is a test counter";
final int counterId = countersManager.allocate(initialLabel);
final String hugeSuffix = Tests.generateStringWithSuffix(" - 42", "x", CountersReader.MAX_LABEL_LENGTH);
final int length = AeronCounters.appendToLabel(countersManager.metaDataBuffer(), counterId, hugeSuffix);
assertNotEquals(hugeSuffix.length(), length);
assertEquals(CountersReader.MAX_LABEL_LENGTH - initialLabel.length(), length);
assertEquals(initialLabel + hugeSuffix.substring(0, length), countersManager.getCounterLabel(counterId));
} |
static <T, V> ThrowingFunction<KV<T, V>, KV<T, String>> createToStringFunctionForPTransform(
String ptransformId, PTransform pTransform) {
return (KV<T, V> input) -> KV.of(input.getKey(), Objects.toString(input.getValue()));
} | @Test
public void testPrimitiveToString() throws Exception {
String pTransformId = "pTransformId";
SdkComponents components = SdkComponents.create();
components.registerEnvironment(Environments.createDockerEnvironment("java"));
RunnerApi.PTransform pTransform = RunnerApi.PTransform.newBuilder().build();
ThrowingFunction<KV<String, Integer>, KV<String, String>> toStringFunction =
ToStringFnRunner.createToStringFunctionForPTransform(pTransformId, pTransform);
KV<String, Integer> input = KV.of("key", 12345);
assertEquals(KV.of("key", "12345"), toStringFunction.apply(input));
} |
public static OAuthBearerValidationResult validateClaimForExistenceAndType(OAuthBearerUnsecuredJws jwt,
boolean required, String claimName, Class<?>... allowedTypes) {
Object rawClaim = Objects.requireNonNull(jwt).rawClaim(Objects.requireNonNull(claimName));
if (rawClaim == null)
return required
? OAuthBearerValidationResult.newFailure(String.format("Required claim missing: %s", claimName))
: OAuthBearerValidationResult.newSuccess();
for (Class<?> allowedType : allowedTypes) {
if (allowedType != null && allowedType.isAssignableFrom(rawClaim.getClass()))
return OAuthBearerValidationResult.newSuccess();
}
return OAuthBearerValidationResult.newFailure(String.format("The %s claim had the incorrect type: %s",
claimName, rawClaim.getClass().getSimpleName()));
} | @Test
public void validateClaimForExistenceAndType() throws OAuthBearerIllegalTokenException {
String claimName = "foo";
for (Boolean exists : new Boolean[] {null, Boolean.TRUE, Boolean.FALSE}) {
boolean useErrorValue = exists == null;
for (Boolean required : new boolean[] {true, false}) {
StringBuilder sb = new StringBuilder("{");
appendJsonText(sb, "exp", 100);
appendCommaJsonText(sb, "sub", "principalName");
if (useErrorValue)
appendCommaJsonText(sb, claimName, 1);
else if (exists)
appendCommaJsonText(sb, claimName, claimName);
sb.append("}");
String compactSerialization = HEADER_COMPACT_SERIALIZATION + Base64.getUrlEncoder().withoutPadding()
.encodeToString(sb.toString().getBytes(StandardCharsets.UTF_8)) + ".";
OAuthBearerUnsecuredJws testJwt = new OAuthBearerUnsecuredJws(compactSerialization, "sub", "scope");
OAuthBearerValidationResult result = OAuthBearerValidationUtils
.validateClaimForExistenceAndType(testJwt, required, claimName, String.class);
if (useErrorValue || required && !exists)
assertTrue(isFailureWithMessageAndNoFailureScope(result));
else
assertTrue(isSuccess(result));
}
}
} |
public static <K, InputT> GroupIntoBatches<K, InputT> ofByteSize(long batchSizeBytes) {
return new GroupIntoBatches<K, InputT>(BatchingParams.createDefault())
.withByteSize(batchSizeBytes);
} | @Test
@Category({
ValidatesRunner.class,
NeedsRunner.class,
UsesTimersInParDo.class,
UsesStatefulParDo.class,
UsesOnWindowExpiration.class
})
public void testInGlobalWindowBatchSizeByteSize() {
PCollection<KV<String, Iterable<String>>> collection =
pipeline
.apply("Input data", Create.of(data))
.apply(GroupIntoBatches.ofByteSize(BATCH_SIZE_BYTES))
// set output coder
.setCoder(KvCoder.of(StringUtf8Coder.of(), IterableCoder.of(StringUtf8Coder.of())));
PAssert.that("Incorrect batch size in one or more elements", collection)
.satisfies(
new SerializableFunction<Iterable<KV<String, Iterable<String>>>, Void>() {
@Override
public Void apply(Iterable<KV<String, Iterable<String>>> input) {
assertTrue(checkBatchByteSizes(input));
return null;
}
});
PAssert.thatSingleton("Incorrect collection size", collection.apply("Count", Count.globally()))
.isEqualTo(4L);
pipeline.run();
} |
public Date retryAfter() {
return Optional.ofNullable(retryAfter).map(Date::new).orElse(null);
} | @Test
public void retryAfterTest() {
Date date = retryableException.retryAfter();
Assert.assertNotNull(date);
} |
@IntRange(from = 0)
public int size() {
return buffer.size();
} | @Test
public void size() {
final DataStream stream = new DataStream();
stream.write(new byte[] { 0, 1, 2, 3, 4, 5, 6});
assertEquals(7, stream.size());
} |
@Override
public final Set<Entry<K, V>> entrySet() {
return delegate.entrySet();
} | @Test
public void requireThatSingletonEntryImplementsEquals() {
Map.Entry<String, String> map = newSingletonMap("foo", "bar").entrySet().iterator().next();
assertNotEquals(map, null);
assertNotEquals(map, new Object());
assertEquals(map, map);
assertNotEquals(map, newSingletonMap("baz", "cox").entrySet().iterator().next());
assertNotEquals(map, newSingletonMap("foo", "cox").entrySet().iterator().next());
assertEquals(map, newSingletonMap("foo", "bar").entrySet().iterator().next());
} |
public static <T extends Type> Type decodeIndexedValue(
String rawInput, TypeReference<T> typeReference) {
return decoder.decodeEventParameter(rawInput, typeReference);
} | @Test
public void testDecodeIndexedBytes16Value() {
String rawInput = "0x1234567890123456789012345678901200000000000000000000000000000000";
byte[] rawInputBytes = Numeric.hexStringToByteArray(rawInput.substring(0, 34));
assertEquals(
FunctionReturnDecoder.decodeIndexedValue(rawInput, new TypeReference<Bytes16>() {}),
(new Bytes16(rawInputBytes)));
} |
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
return decoder.decodeFunctionResult(rawInput, outputParameters);
} | @Test
public void testDecodeStaticStructNested() {
String rawInput =
"0x0000000000000000000000000000000000000000000000000000000000000001"
+ "000000000000000000000000000000000000000000000000000000000000000a"
+ "0000000000000000000000000000000000000000000000000000000000000001";
assertEquals(
FunctionReturnDecoder.decode(
rawInput, AbiV2TestFixture.getFuzzFunction.getOutputParameters()),
Collections.singletonList(
new AbiV2TestFixture.Fuzz(
new AbiV2TestFixture.Bar(BigInteger.ONE, BigInteger.TEN),
BigInteger.ONE)));
} |
public static boolean containsUpperCase(final long word) {
return applyUpperCasePattern(word) != 0;
} | @Test
void containsUpperCaseLong() {
// given
final byte[] asciiTable = getExtendedAsciiTable();
shuffleArray(asciiTable, random);
// when
for (int idx = 0; idx < asciiTable.length; idx += Long.BYTES) {
final long value = getLong(asciiTable, idx);
final boolean actual = SWARUtil.containsUpperCase(value);
boolean expected = false;
for (int i = 0; i < Long.BYTES; i++) {
expected |= Character.isUpperCase(asciiTable[idx + i]);
}
// then
assertEquals(expected, actual);
}
} |
private CompletionStage<RestResponse> pushStateStatus(RestRequest request) {
return statusOperation(request, PUSH_STATE_STATUS);
} | @Test
public void testPushState() {
RestCacheClient cache = getCacheClient(LON);
RestCacheClient backupCache = getCacheClient(NYC);
String key = "key";
String value = "value";
Function<String, Integer> keyOnBackup = k -> responseStatus(backupCache.get(key));
takeBackupOffline(LON, NYC);
assertEquals(OFFLINE, getBackupStatus(LON, NYC));
assertEquals(ONLINE, getBackupStatus(LON, SFO));
assertNoContent(cache.put(key, value));
assertEquals(404, (int) keyOnBackup.apply(key));
assertSuccessful(cache.pushSiteState(NYC));
assertEquals(ONLINE, getBackupStatus(LON, NYC));
eventuallyEquals("OK", () -> pushStateStatus(cache, NYC));
assertEquals(200, responseStatus(backupCache.get(key)));
} |
public void listenToCluster(String clusterName)
{
// if cluster name is a symlink, watch for D2SymlinkNode instead
String resourceName = D2_CLUSTER_NODE_PREFIX + clusterName;
if (SymlinkUtil.isSymlinkNodeOrPath(clusterName))
{
listenToSymlink(clusterName, resourceName);
}
else
{
_watchedClusterResources.computeIfAbsent(clusterName, k ->
{
XdsClient.NodeResourceWatcher watcher = getClusterResourceWatcher(clusterName);
_xdsClient.watchXdsResource(resourceName, watcher);
return watcher;
});
}
} | @Test
public void testListenToNormalCluster()
{
XdsToD2PropertiesAdaptorFixture fixture = new XdsToD2PropertiesAdaptorFixture();
fixture.getSpiedAdaptor().listenToCluster(PRIMARY_CLUSTER_NAME);
verify(fixture._xdsClient).watchXdsResource(eq(PRIMARY_CLUSTER_RESOURCE_NAME), anyNodeWatcher());
verifyClusterNodeUpdate(fixture, PRIMARY_CLUSTER_NAME, null, PRIMARY_CLUSTER_PROPERTIES);
} |
IdBatchAndWaitTime newIdBaseLocal(int batchSize) {
return newIdBaseLocal(Clock.currentTimeMillis(), getNodeId(), batchSize);
} | @Test
public void when_10mIdsInOneBatch_then_wait() {
int batchSize = 10_000_000;
IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, batchSize);
assertEquals(batchSize / IDS_PER_SECOND - DEFAULT_ALLOWED_FUTURE_MILLIS, result.waitTimeMillis);
} |
public static byte[] serialize(final Object body) throws IOException {
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
final ObjectOutputStream outputStream = new ObjectOutputStream(byteArrayOutputStream);
try {
outputStream.writeObject(body);
return byteArrayOutputStream.toByteArray();
} catch (NotSerializableException exception) {
throw new RuntimeCamelException(exception);
} finally {
byteArrayOutputStream.close();
outputStream.close();
}
} | @Test
public void testSerializationOfString() throws Exception {
String in = "Hello World!";
byte[] expected = PulsarMessageUtils.serialize(in);
assertNotNull(expected);
} |
static InvokerResult convertToResult(Query query, SearchProtocol.SearchReply protobuf,
DocumentDatabase documentDatabase, int partId, int distKey)
{
InvokerResult result = new InvokerResult(query, protobuf.getHitsCount());
result.getResult().setTotalHitCount(protobuf.getTotalHitCount());
result.getResult().setCoverage(convertToCoverage(protobuf));
convertSearchReplyErrors(result.getResult(), protobuf.getErrorsList());
List<String> featureNames = protobuf.getMatchFeatureNamesList();
var haveMatchFeatures = ! featureNames.isEmpty();
MatchFeatureData matchFeatures = haveMatchFeatures ? new MatchFeatureData(featureNames) : null;
var haveGrouping = ! protobuf.getGroupingBlob().isEmpty();
if (haveGrouping) {
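// The grouping blob is a count-prefixed sequence of serialized Grouping trees,
// surfaced to the result as a single GroupingListHit.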
BufferSerializer buf = new BufferSerializer(new GrowableByteBuffer(protobuf.getGroupingBlob().asReadOnlyByteBuffer()));
int cnt = buf.getInt(null);
ArrayList<Grouping> list = new ArrayList<>(cnt);
for (int i = 0; i < cnt; i++) {
Grouping g = new Grouping();
g.deserialize(buf);
list.add(g);
}
GroupingListHit hit = new GroupingListHit(list, documentDatabase, query);
result.getResult().hits().add(hit);
}
for (var replyHit : protobuf.getHitsList()) {
LeanHit hit = (replyHit.getSortData().isEmpty())
? new LeanHit(replyHit.getGlobalId().toByteArray(), partId, distKey, replyHit.getRelevance())
: new LeanHit(replyHit.getGlobalId().toByteArray(), partId, distKey, replyHit.getRelevance(), replyHit.getSortData().toByteArray());
if (haveMatchFeatures) {
var hitFeatures = matchFeatures.addHit();
var featureList = replyHit.getMatchFeaturesList();
if (featureList.size() == featureNames.size()) {
int idx = 0;
for (SearchProtocol.Feature value : featureList) {
ByteString tensorBlob = value.getTensor();
if (tensorBlob.isEmpty()) {
hitFeatures.set(idx++, value.getNumber());
} else {
hitFeatures.set(idx++, tensorBlob.toByteArray());
}
}
hit.addMatchFeatures(hitFeatures);
} else {
result.getResult().hits().addError(ErrorMessage.createBackendCommunicationError("mismatch in match feature sizes"));
}
}
result.getLeanHits().add(hit);
}
var slimeTrace = protobuf.getSlimeTrace();
if ( ! slimeTrace.isEmpty()) {
var traces = new Value.ArrayValue();
traces.add(new SlimeAdapter(BinaryFormat.decode(slimeTrace.toByteArray()).get()));
query.trace(traces, query.getTrace().getLevel());
}
return result;
} | @Test
void testSearchReplyDecodingWithRelevance() {
Query q = new Query("search/?query=test");
InvokerResult result = ProtobufSerialization.convertToResult(q, createSearchReply(5, false), null, 1, 2);
assertEquals(result.getResult().getTotalHitCount(), 7);
List<LeanHit> hits = result.getLeanHits();
assertEquals(5, hits.size());
double expectedRelevance = 5;
int hitNum = 0;
for (LeanHit hit : hits) {
assertEquals('a', hit.getGid()[0]);
assertEquals(hitNum, hit.getGid()[11]);
assertEquals(expectedRelevance--, hit.getRelevance(), DELTA);
assertEquals(1, hit.getPartId());
assertEquals(2, hit.getDistributionKey());
assertFalse(hit.hasSortData());
hitNum++;
}
} |
protected final AnyKeyboardViewBase getMiniKeyboard() {
return mMiniKeyboard;
} | @Test
public void testShortPressWhenNoPrimaryKeyButTextWithPopupShouldOutputText() throws Exception {
ExternalAnyKeyboard anyKeyboard =
new ExternalAnyKeyboard(
new DefaultAddOn(getApplicationContext(), getApplicationContext()),
getApplicationContext(),
keyboard_with_keys_with_no_codes,
keyboard_with_keys_with_no_codes,
"test",
0,
0,
"en",
"",
"",
KEYBOARD_ROW_MODE_NORMAL);
anyKeyboard.loadKeyboard(mViewUnderTest.mKeyboardDimens);
mViewUnderTest.setKeyboard(anyKeyboard, 0);
final AnyKeyboard.AnyKey key = (AnyKeyboard.AnyKey) anyKeyboard.getKeys().get(6);
Assert.assertEquals(0, key.getPrimaryCode());
Assert.assertEquals(0, key.getCodesCount());
Assert.assertEquals(R.xml.popup_16keys_wxyz, key.popupResId);
Assert.assertEquals("popup", key.label);
Assert.assertNull(key.popupCharacters);
ViewTestUtils.navigateFromTo(mViewUnderTest, key, key, 30, true, false);
Assert.assertNull(mViewUnderTest.getMiniKeyboard());
Assert.assertFalse(mViewUnderTest.mMiniKeyboardPopup.isShowing());
ViewTestUtils.navigateFromTo(mViewUnderTest, key, key, 10, false, true);
Mockito.verify(mMockKeyboardListener, Mockito.never())
.onKey(
anyInt(),
nullable(Keyboard.Key.class),
anyInt(),
Mockito.nullable(int[].class),
Mockito.anyBoolean());
Mockito.verify(mMockKeyboardListener).onText(same(key), eq("popping"));
} |
@GetMapping("/authenticate")
public ResponseEntity<UsernamePasswordAuthenticationToken> getAuthentication(@RequestParam String token) {
UsernamePasswordAuthenticationToken authentication = tokenService.getAuthentication(token);
return ResponseEntity.ok(authentication);
} | @Test
void givenValidToken_whenGetAuthentication_thenReturnAuthentication() throws Exception {
// Given
String validToken = "validToken";
UsernamePasswordAuthenticationToken authenticationToken = new UsernamePasswordAuthenticationToken("user", "password");
// When
when(tokenService.getAuthentication(validToken)).thenReturn(authenticationToken);
// Then
mockMvc.perform(MockMvcRequestBuilders.get("/api/v1/users/authenticate")
.param("token", validToken))
.andDo(MockMvcResultHandlers.print())
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$.principal").value("user"))
.andExpect(MockMvcResultMatchers.jsonPath("$.credentials").value("password"));
// Verify
verify(tokenService, times(1)).getAuthentication(validToken);
} |
public void mirrorKeys() {
/* how to mirror?
width = 55
[0..15] [20..35] [40..55]
phase 1: multiply by -1
[0] [-20] [-40]
phase 2: add keyboard width
[55] [35] [15]
phase 3: subtract the key's width
[40] [20] [0]
cool?
*/
final int keyboardWidth = getMinWidth();
for (Key k : getKeys()) {
var newX = -1 * k.x; // phase 1
newX += keyboardWidth; // phase 2
newX -= k.width; // phase 3
k.x = newX;
}
} | @Test
public void testKeyboardPopupSupportsMirrorOneRowNotFull() throws Exception {
String popupCharacters = "qwe";
AnyPopupKeyboard keyboard =
new AnyPopupKeyboard(
new DefaultAddOn(getApplicationContext(), getApplicationContext()),
getApplicationContext(),
popupCharacters,
SIMPLE_KeyboardDimens,
"POP_KEYBOARD");
int vGap = (int) SIMPLE_KeyboardDimens.getRowVerticalGap();
int hGap = (int) SIMPLE_KeyboardDimens.getKeyHorizontalGap();
final int keyWidth =
(int)
(SIMPLE_KeyboardDimens.getKeyboardMaxWidth()
- SIMPLE_KeyboardDimens.getKeyHorizontalGap() * popupCharacters.length())
/ 10;
assertKeyValues(keyboard, 'q', vGap, 0);
assertKeyValues(keyboard, 'w', vGap, keyWidth);
assertKeyValues(keyboard, 'e', vGap, hGap + 2 * keyWidth);
keyboard.mirrorKeys();
// same order, mirrored X position
assertKeyValues(keyboard, 'q', vGap, 3 * hGap + 2 * keyWidth);
assertKeyValues(keyboard, 'w', vGap, 2 * hGap + keyWidth);
assertKeyValues(keyboard, 'e', vGap, hGap);
} |
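// A worked example of the three mirror phases for the middle key above, with an
// assumed keyboard width of 30 and key width of 10 (gaps ignored for simplicity):
int keyboardWidth = 30, keyWidth = 10, x = 10; // middle key starts at x = 10
x = -x; // phase 1: -10
x += keyboardWidth; // phase 2: 20
x -= keyWidth; // phase 3: 10 -> the middle key mirrors onto itself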
@Override
public InputStream fetch(String fetchKey, Metadata metadata, ParseContext parseContext) throws TikaException, IOException {
LOGGER.debug("about to fetch fetchkey={} from endpoint ({})", fetchKey, endpoint);
try {
BlobClient blobClient = blobClientFactory.getClient(fetchKey);
if (extractUserMetadata) {
BlobProperties properties = blobClient.getProperties();
if (properties.getMetadata() != null) {
for (Map.Entry<String, String> e : properties
.getMetadata()
.entrySet()) {
metadata.add(PREFIX + ":" + e.getKey(), e.getValue());
}
}
}
if (!spoolToTemp) {
return TikaInputStream.get(blobClient.openInputStream());
} else {
long start = System.currentTimeMillis();
TemporaryResources tmpResources = new TemporaryResources();
Path tmp = tmpResources.createTempFile();
blobClient.downloadToFile(tmp.toRealPath().toString());
TikaInputStream tis = TikaInputStream.get(tmp, metadata, tmpResources);
long elapsed = System.currentTimeMillis() - start;
LOGGER.debug("took {} ms to copy to local tmp file", elapsed);
return tis;
}
} catch (Exception e) {
throw new IOException("az-blob storage exception", e);
}
} | @Test
public void testConfig() throws Exception {
FetcherManager fetcherManager = FetcherManager.load(Paths.get(this
.getClass()
.getResource("/tika-config-az-blob.xml")
.toURI()));
Fetcher fetcher = fetcherManager.getFetcher("az-blob");
List<Metadata> metadataList = null;
try (Reader reader = new BufferedReader(new InputStreamReader(fetcher.fetch(FETCH_STRING, new Metadata(), new ParseContext()), StandardCharsets.UTF_8))) {
metadataList = JsonMetadataList.fromJson(reader);
}
debug(metadataList);
} |
public static DataSchema buildSchemaByProjection(DataSchema schema, DataMap maskMap)
{
return buildSchemaByProjection(schema, maskMap, Collections.emptyList());
} | @Test
public void testBuildSchemaByProjectionAllowWhitelistedFields()
{
final String whiteListedFieldName = "$URN";
RecordDataSchema schema = (RecordDataSchema) DataTemplateUtil.getSchema(RecordTemplateWithComplexKey.class);
DataMap projectionMask = buildProjectionMaskDataMap("body", whiteListedFieldName);
DataMap nestedMask = buildProjectionMaskDataMap("a", whiteListedFieldName);
projectionMask.put("id", nestedMask);
RecordDataSchema validatingSchema = (RecordDataSchema) buildSchemaByProjection(schema, projectionMask,
Lists.newArrayList(whiteListedFieldName));
Assert.assertTrue(validatingSchema.contains("id"));
Assert.assertTrue(validatingSchema.contains("body"));
Assert.assertFalse(validatingSchema.contains(whiteListedFieldName));
Assert.assertTrue(((RecordDataSchema) validatingSchema.getField("id").getType()).contains("a"));
Assert.assertFalse(((RecordDataSchema) validatingSchema.getField("id").getType()).contains(whiteListedFieldName));
} |
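// A sketch of the mask shape the buildProjectionMaskDataMap test helper presumably
// produces; rest.li positive projection masks map field names to the integer 1,
// with nested DataMaps for nested records. Treat the exact convention as an
// assumption rather than the helper's verified output.
DataMap mask = new DataMap();
mask.put("body", 1); // 1 marks a positively projected field
mask.put("$URN", 1);
DataMap idMask = new DataMap();
idMask.put("a", 1);
idMask.put("$URN", 1);
mask.put("id", idMask); // nested mask for the "id" record field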
public String getActualValue() {
return actualValue;
} | @Test
public void getValue_returns_toString_of_Object_passed_in_constructor() {
assertThat(new EvaluatedCondition(SOME_CONDITION, SOME_LEVEL, new A()).getActualValue()).isEqualTo("A string");
} |
@Override
public IntStream intStream() {
return IntStream.of(value);
} | @Test
public void testIntStream() throws Exception {
IntSet sis = new SingletonIntSet(3);
assertEquals(1, sis.intStream().count());
} |
public ConfigCenterBuilder highestPriority(Boolean highestPriority) {
this.highestPriority = highestPriority;
return getThis();
} | @Test
void highestPriority() {
ConfigCenterBuilder builder = ConfigCenterBuilder.newBuilder();
builder.highestPriority(true);
Assertions.assertTrue(builder.build().isHighestPriority());
} |
public boolean handleSentinelResourceReconciliation(CR resource, KubernetesClient client) {
if (!isSentinelResource(resource)) {
return false;
}
ResourceID resourceId = ResourceID.fromResource(resource);
sentinelResources.compute(
resourceId,
(id, previousState) -> {
boolean firstReconcile = false;
if (previousState == null) {
firstReconcile = true;
previousState = new SentinelResourceState();
}
previousState.onReconcile(resource);
if (firstReconcile) {
updateSpecAndScheduleHealthCheck(resourceId, previousState, client);
}
return previousState;
});
return true;
} | @Test
@Order(3)
void testHandleSentinelResourceReconciliation() throws InterruptedException {
// Reduce the SENTINEL_RESOURCE_RECONCILIATION_DELAY time to 0
SparkOperatorConfManager.INSTANCE.refresh(
Collections.singletonMap(
SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(), "10"));
// Before Spark Reconciler Started
SparkApplication mockApp = createMockApp(DEFAULT);
kubernetesClient.resource(ReconcilerUtils.clone(mockApp)).create();
KubernetesResourceList<SparkApplication> crList =
kubernetesClient.resources(SparkApplication.class).inNamespace(DEFAULT).list();
SparkApplication sparkApplication = crList.getItems().get(0);
Long generation = sparkApplication.getMetadata().getGeneration();
assertEquals(generation, 1L);
// Spark Reconciler Handle Sentinel Resources at the first time
var sentinelManager = new SentinelManager<SparkApplication>();
sentinelManager.handleSentinelResourceReconciliation(sparkApplication, kubernetesClient);
KubernetesResourceList<SparkApplication> crList2 =
kubernetesClient.resources(SparkApplication.class).inNamespace(DEFAULT).list();
SparkApplication sparkApplication2 = crList2.getItems().get(0);
Map<String, String> sparkConf2 = new HashMap<>(sparkApplication2.getSpec().getSparkConf());
long generation2 = sparkApplication2.getMetadata().getGeneration();
assertEquals(sparkConf2.get(Constants.SENTINEL_RESOURCE_DUMMY_FIELD), "1");
assertEquals(generation2, 2L);
var state2 =
(SentinelManager<SparkApplication>.SentinelResourceState)
sentinelManager.getSentinelResources().get(ResourceID.fromResource(mockApp));
long previousGeneration2 = state2.previousGeneration;
assertTrue(sentinelManager.allSentinelsAreHealthy());
assertEquals(previousGeneration2, 1L);
Thread.sleep(Duration.ofSeconds(SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS * 2).toMillis());
List<SparkApplication> crList3 =
kubernetesClient.resources(SparkApplication.class).inNamespace(DEFAULT).list().getItems();
SparkApplication sparkApplication3 = crList3.get(0);
Map<String, String> sparkConf3 = new HashMap<>(sparkApplication3.getSpec().getSparkConf());
// Spark Sentinel Applications' s k8s generation should change
assertNotEquals(sparkApplication3.getMetadata().getGeneration(), generation2);
// Spark conf SPARK_CONF_SENTINEL_DUMMY_FIELD values should increase
assertNotEquals(
sparkConf2.get(Constants.SENTINEL_RESOURCE_DUMMY_FIELD),
sparkConf3.get(Constants.SENTINEL_RESOURCE_DUMMY_FIELD));
var state3 =
(SentinelManager<SparkApplication>.SentinelResourceState)
sentinelManager.getSentinelResources().get(ResourceID.fromResource(mockApp));
assertEquals(state3.previousGeneration, previousGeneration2);
// Given the 2 * SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS, the reconcile method is
// not called to handleSentinelResourceReconciliation to update
assertFalse(sentinelManager.allSentinelsAreHealthy());
sentinelManager.handleSentinelResourceReconciliation(sparkApplication2, kubernetesClient);
sentinelManager.handleSentinelResourceReconciliation(sparkApplication2, kubernetesClient);
boolean isHealthy;
long currentTimeInMills = System.currentTimeMillis();
do {
isHealthy = sentinelManager.allSentinelsAreHealthy();
} while (!isHealthy
&& notTimedOut(
currentTimeInMills,
TimeUnit.MILLISECONDS.convert(
Duration.ofSeconds(SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS))));
assertTrue(isHealthy);
kubernetesClient.resources(SparkApplication.class).inNamespace(DEFAULT).delete();
} |
public static org.kie.pmml.api.models.MiningField convertToKieMiningField(final MiningField toConvert,
final Field<?> field) {
final String name = toConvert.getName();
final FIELD_USAGE_TYPE fieldUsageType = toConvert.getUsageType() != null ?
FIELD_USAGE_TYPE.byName(toConvert.getUsageType().value()) : null;
final OP_TYPE opType = toConvert.getOpType() != null ? OP_TYPE.byName(toConvert.getOpType().value()) : null;
final DATA_TYPE dataType = field.getDataType() != null ?
DATA_TYPE.byName(field.getDataType().value()) : null;
final MISSING_VALUE_TREATMENT_METHOD missingValueTreatmentMethod =
toConvert.getMissingValueTreatment() != null ?
MISSING_VALUE_TREATMENT_METHOD.byName(toConvert.getMissingValueTreatment().value()) : null;
final INVALID_VALUE_TREATMENT_METHOD invalidValueTreatmentMethod =
toConvert.getInvalidValueTreatment() != null ?
INVALID_VALUE_TREATMENT_METHOD.byName(toConvert.getInvalidValueTreatment().value()) : null;
final String missingValueReplacement = toConvert.getMissingValueReplacement() != null ?
toConvert.getMissingValueReplacement().toString() : null;
final String invalidValueReplacement = toConvert.getInvalidValueReplacement() != null ?
toConvert.getInvalidValueReplacement().toString() : null;
final List<String> allowedValues = field instanceof DataField ?
convertDataFieldValues(((DataField) field).getValues()) : Collections.emptyList();
final List<org.kie.pmml.api.models.Interval> intervals = field instanceof DataField ?
convertDataFieldIntervals(((DataField) field).getIntervals()) : Collections.emptyList();
return new org.kie.pmml.api.models.MiningField(name,
fieldUsageType,
opType,
dataType,
missingValueTreatmentMethod,
invalidValueTreatmentMethod,
missingValueReplacement,
invalidValueReplacement,
allowedValues,
intervals);
} | @Test
void convertToKieMiningField() {
final String fieldName = "fieldName";
final MiningField.UsageType usageType = MiningField.UsageType.ACTIVE;
final MiningField toConvert = getMiningField(fieldName, usageType);
toConvert.setOpType(null);
final DataField dataField = getDataField(fieldName, OpType.CATEGORICAL, DataType.STRING);
org.kie.pmml.api.models.MiningField retrieved =
org.kie.pmml.compiler.api.utils.ModelUtils.convertToKieMiningField(toConvert, dataField);
assertThat(retrieved).isNotNull();
assertThat(retrieved.getName()).isEqualTo(fieldName);
assertThat(retrieved.getUsageType()).isEqualTo(FIELD_USAGE_TYPE.ACTIVE);
assertThat(retrieved.getDataType()).isEqualTo(DATA_TYPE.STRING);
assertThat(retrieved.getOpType()).isNull();
toConvert.setOpType(OpType.CATEGORICAL);
retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.convertToKieMiningField(toConvert, dataField);
assertThat(retrieved.getOpType()).isEqualTo(OP_TYPE.CATEGORICAL);
} |
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
FunctionConfig mergedConfig = existingConfig.toBuilder().build();
if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
throw new IllegalArgumentException("Tenants differ");
}
if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
throw new IllegalArgumentException("Namespaces differ");
}
if (!existingConfig.getName().equals(newConfig.getName())) {
throw new IllegalArgumentException("Function Names differ");
}
if (!StringUtils.isEmpty(newConfig.getClassName())) {
mergedConfig.setClassName(newConfig.getClassName());
}
if (!StringUtils.isEmpty(newConfig.getJar())) {
mergedConfig.setJar(newConfig.getJar());
}
if (newConfig.getInputSpecs() == null) {
newConfig.setInputSpecs(new HashMap<>());
}
if (mergedConfig.getInputSpecs() == null) {
mergedConfig.setInputSpecs(new HashMap<>());
}
if (newConfig.getInputs() != null) {
newConfig.getInputs().forEach((topicName -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder().isRegexPattern(false).build());
}));
}
if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
ConsumerConfig.builder()
.isRegexPattern(true)
.build());
}
if (newConfig.getCustomSerdeInputs() != null) {
newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.serdeClassName(serdeClassName)
.isRegexPattern(false)
.build());
});
}
if (newConfig.getCustomSchemaInputs() != null) {
newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.schemaType(schemaClassname)
.isRegexPattern(false)
.build());
});
}
if (!newConfig.getInputSpecs().isEmpty()) {
newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
if (!existingConfig.getInputSpecs().containsKey(topicName)) {
throw new IllegalArgumentException("Input Topics cannot be altered");
}
if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
throw new IllegalArgumentException(
"isRegexPattern for input topic " + topicName + " cannot be altered");
}
mergedConfig.getInputSpecs().put(topicName, consumerConfig);
});
}
if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
.equals(existingConfig.getOutputSerdeClassName())) {
throw new IllegalArgumentException("Output Serde mismatch");
}
if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
.equals(existingConfig.getOutputSchemaType())) {
throw new IllegalArgumentException("Output Schema mismatch");
}
if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
mergedConfig.setLogTopic(newConfig.getLogTopic());
}
if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
.equals(existingConfig.getProcessingGuarantees())) {
throw new IllegalArgumentException("Processing Guarantees cannot be altered");
}
if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
.equals(existingConfig.getRetainOrdering())) {
throw new IllegalArgumentException("Retain Ordering cannot be altered");
}
if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
.equals(existingConfig.getRetainKeyOrdering())) {
throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
}
if (!StringUtils.isEmpty(newConfig.getOutput())) {
mergedConfig.setOutput(newConfig.getOutput());
}
if (newConfig.getUserConfig() != null) {
mergedConfig.setUserConfig(newConfig.getUserConfig());
}
if (newConfig.getSecrets() != null) {
mergedConfig.setSecrets(newConfig.getSecrets());
}
if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
throw new IllegalArgumentException("Runtime cannot be altered");
}
if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
throw new IllegalArgumentException("AutoAck cannot be altered");
}
if (newConfig.getMaxMessageRetries() != null) {
mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
}
if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
}
if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
.equals(existingConfig.getSubName())) {
throw new IllegalArgumentException("Subscription Name cannot be altered");
}
if (newConfig.getParallelism() != null) {
mergedConfig.setParallelism(newConfig.getParallelism());
}
if (newConfig.getResources() != null) {
mergedConfig
.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
}
if (newConfig.getWindowConfig() != null) {
mergedConfig.setWindowConfig(newConfig.getWindowConfig());
}
if (newConfig.getTimeoutMs() != null) {
mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
}
if (newConfig.getCleanupSubscription() != null) {
mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
}
if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
}
if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
}
if (newConfig.getProducerConfig() != null) {
mergedConfig.setProducerConfig(newConfig.getProducerConfig());
}
return mergedConfig;
} | @Test
public void testMergeDifferentLogTopic() {
FunctionConfig functionConfig = createFunctionConfig();
FunctionConfig newFunctionConfig = createUpdatedFunctionConfig("logTopic", "Different");
FunctionConfig mergedConfig = FunctionConfigUtils.validateUpdate(functionConfig, newFunctionConfig);
assertEquals(
mergedConfig.getLogTopic(),
"Different"
);
mergedConfig.setLogTopic(functionConfig.getLogTopic());
assertEquals(
new Gson().toJson(functionConfig),
new Gson().toJson(mergedConfig)
);
} |
public void setOtherBits(Bits bits) {
mOtherBits = bits;
} | @Test
public void setOtherBits() {
Mode mode = new Mode((short) 0000);
mode.setOtherBits(Mode.Bits.READ_EXECUTE);
assertEquals(Mode.Bits.READ_EXECUTE, mode.getOtherBits());
mode.setOtherBits(Mode.Bits.WRITE);
assertEquals(Mode.Bits.WRITE, mode.getOtherBits());
mode.setOtherBits(Mode.Bits.ALL);
assertEquals(Mode.Bits.ALL, mode.getOtherBits());
} |
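// A sketch of the (assumed) POSIX-style encoding behind the setters: the lowest
// octal digit of the mode short holds the "other" bits, so setOtherBits only
// rewrites bits 0-2.
short mode = 0755; // rwxr-xr-x
int otherBits = mode & 07; // 5 -> r-x, i.e. READ_EXECUTE
short updated = (short) ((mode & ~07) | 02); // swap in -w- (WRITE) -> 0752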
public Collection<String> getAllShadowTableNames() {
return shadowTableRules.keySet();
} | @Test
void assertGetAllShadowTableNames() {
Collection<String> allShadowTableNames = shadowRule.getAllShadowTableNames();
assertThat(allShadowTableNames.size(), is(2));
Iterator<String> iterator = allShadowTableNames.iterator();
assertThat(iterator.next(), is("t_user"));
assertThat(iterator.next(), is("t_order"));
} |
public static <K, V> Map<K, V> fieldValueAsMap(Iterable<?> iterable, String fieldNameForKey, String fieldNameForValue) {
return IterUtil.fieldValueAsMap(IterUtil.getIter(iterable), fieldNameForKey, fieldNameForValue);
} | @Test
public void fieldValueAsMapTest() {
final List<TestBean> list = CollUtil.newArrayList(new TestBean("张三", 12, DateUtil.parse("2018-05-01")), //
new TestBean("李四", 13, DateUtil.parse("2018-03-01")), //
new TestBean("王五", 14, DateUtil.parse("2018-04-01"))//
);
final Map<String, Integer> map = CollUtil.fieldValueAsMap(list, "name", "age");
assertEquals(new Integer(12), map.get("张三"));
assertEquals(new Integer(13), map.get("李四"));
assertEquals(new Integer(14), map.get("王五"));
} |
@Override
public List<ServiceInstance> filter(String serviceName, List<ServiceInstance> serviceInstances) {
return serviceInstances;
} | @Test
public void filter() {
final DefaultServiceInstance instance = new DefaultServiceInstance("localhost", "127.0.0.1", 8080,
Collections.emptyMap(), "zk");
final InstanceFilter nopInstanceFilter = new NopInstanceFilter();
final List<ServiceInstance> defaultServiceInstances = Collections.singletonList(instance);
final List<ServiceInstance> test = nopInstanceFilter.filter("test", defaultServiceInstances);
Assert.assertEquals(test, defaultServiceInstances);
} |
public OpenAPI read(Class<?> cls) {
return read(cls, resolveApplicationPath(), null, false, null, null, new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>());
} | @Test(description = "response generic subclass")
public void testTicket3082() {
Reader reader = new Reader(new OpenAPI());
OpenAPI openAPI = reader.read(ProcessTokenRestService.class);
String yaml = "openapi: 3.0.1\n" +
"paths:\n" +
" /token:\n" +
" post:\n" +
" operationId: create\n" +
" requestBody:\n" +
" content:\n" +
" application/json:\n" +
" schema:\n" +
" $ref: '#/components/schemas/ProcessTokenDTO'\n" +
" responses:\n" +
" default:\n" +
" description: default response\n" +
" content:\n" +
" application/json:\n" +
" schema:\n" +
" $ref: '#/components/schemas/ProcessTokenDTO'\n" +
"components:\n" +
" schemas:\n" +
" ProcessTokenDTO:\n" +
" type: object\n" +
" properties:\n" +
" guid:\n" +
" type: string\n";
SerializationMatchers.assertEqualsToYaml(openAPI, yaml);
} |
@Override
public long getOffsetInQueueByTime(String topic, int queueId, long timestamp, BoundaryType type) {
FlatMessageFile flatFile = flatFileStore.getFlatFile(new MessageQueue(topic, brokerName, queueId));
if (flatFile == null) {
return -1L;
}
return flatFile.getQueueOffsetByTimeAsync(timestamp, type).join();
} | @Test
public void testGetOffsetInQueueByTime() throws Exception {
this.getMessageFromTieredStoreTest();
mq = dispatcherTest.mq;
messageStore = dispatcherTest.messageStore;
storeConfig = dispatcherTest.storeConfig;
// message time is all 11
Assert.assertEquals(-1L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 0, 10, BoundaryType.LOWER));
Assert.assertEquals(100L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 10, BoundaryType.LOWER));
Assert.assertEquals(100L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 11, BoundaryType.LOWER));
Assert.assertEquals(200L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 12, BoundaryType.LOWER));
Assert.assertEquals(100L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 10, BoundaryType.UPPER));
Assert.assertEquals(199L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 11, BoundaryType.UPPER));
Assert.assertEquals(200L, fetcher.getOffsetInQueueByTime(mq.getTopic(), 1, 12, BoundaryType.UPPER));
} |
@VisibleForTesting
@Override
List<String> cancelNonTerminalTasks(Workflow workflow) {
List<String> erroredTasks = new ArrayList<>();
// Update non-terminal tasks' status to CANCELED
for (Task task : workflow.getTasks()) {
if (!task.getStatus().isTerminal()) {
// Cancel the ones which are not completed yet....
task.setStatus(CANCELED);
// all of our tasks are system tasks.
Checks.checkTrue(
SystemTaskType.is(task.getTaskType()),
"Invalid task type [%s], all tasks should have a known maestro task type.",
task.getTaskType());
WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
try {
workflowSystemTask.cancel(workflow, task, this);
executionDAOFacade.updateTask(task); // only update if cancelled
} catch (Exception e) {
erroredTasks.add(task.getReferenceTaskName());
LOG.error(
"Error canceling system task:{}/{} in workflow: {}",
workflowSystemTask.getName(),
task.getTaskId(),
workflow.getWorkflowId(),
e);
}
}
}
if (erroredTasks.isEmpty()) {
try {
workflowStatusListener.onWorkflowFinalizedIfEnabled(workflow);
queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId());
} catch (Exception e) {
LOG.error("Error removing workflow: {} from decider queue", workflow.getWorkflowId(), e);
throw e; // we need to throw it to get at least once guarantee.
}
} else {
// also throw to retry errored tasks later.
throw new MaestroRetryableError(
"Error canceling tasks [%s] in workflow: [%s]", erroredTasks, workflow.getWorkflowId());
}
return erroredTasks;
} | @Test
public void testCancelNonTerminalTasksFailed() {
Task mockTask1 = mock(Task.class);
when(mockTask1.getTaskId()).thenReturn("task-id-1");
when(mockTask1.getTaskType()).thenReturn(Constants.MAESTRO_PREFIX);
when(mockTask1.getStatus()).thenReturn(Task.Status.IN_PROGRESS);
workflow.getTasks().add(mockTask1);
AssertHelper.assertThrows(
"All tasks should have a known maestro task type.",
IllegalArgumentException.class,
"Invalid task type",
() -> maestroWorkflowExecutor.cancelNonTerminalTasks(workflow));
when(mockTask1.getTaskType()).thenReturn(Constants.DEFAULT_START_STEP_NAME);
task1.setShouldThrow(true);
Task maestroTask = new Task();
maestroTask.setTaskId(UUID.randomUUID().toString());
maestroTask.setTaskType(Constants.MAESTRO_TASK_NAME);
maestroTask.setStatus(Task.Status.SCHEDULED);
workflow.getTasks().add(maestroTask);
AssertHelper.assertThrows(
"Cancel throws an exception will fail cancelNonTerminalTasks call.",
MaestroRetryableError.class,
"Error canceling tasks ",
() -> maestroWorkflowExecutor.cancelNonTerminalTasks(workflow));
ArgumentCaptor<Task> argumentCaptor = ArgumentCaptor.forClass(Task.class);
verify(executionDAOFacade, times(1)).updateTask(argumentCaptor.capture());
assertEquals(1, argumentCaptor.getAllValues().size());
assertEquals(Constants.MAESTRO_TASK_NAME, argumentCaptor.getAllValues().get(0).getTaskType());
assertEquals(Task.Status.CANCELED, argumentCaptor.getAllValues().get(0).getStatus());
verify(queueDAO, times(0)).remove(any(), any());
} |
static boolean allowCIDToUnicodeRange(Map.Entry<Integer, String> prev,
Map.Entry<Integer, String> next)
{
if (prev == null || next == null)
{
return false;
}
return allowCodeRange(prev.getKey(), next.getKey())
&& allowDestinationRange(prev.getValue(), next.getValue());
} | @Test
void testAllowCIDToUnicodeRange()
{
Map.Entry<Integer, String> six = new AbstractMap.SimpleEntry<>(0x03FF, "6");
Map.Entry<Integer, String> seven = new AbstractMap.SimpleEntry<>(0x0400,
"7");
Map.Entry<Integer, String> eight = new AbstractMap.SimpleEntry<>(0x0401,
"8");
assertFalse(ToUnicodeWriter.allowCIDToUnicodeRange(null, seven));
assertFalse(ToUnicodeWriter.allowCIDToUnicodeRange(six, null));
assertFalse(ToUnicodeWriter.allowCIDToUnicodeRange(six, seven));
assertTrue(ToUnicodeWriter.allowCIDToUnicodeRange(seven, eight));
} |
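// A guess at the code-range rule the assertions above imply (an assumption, not
// PDFBox's verified implementation): CIDs must be consecutive and must not cross
// a 256-value boundary, because a CMap bfrange only increments the last byte.
static boolean allowCodeRangeSketch(int prev, int next) {
return prev + 1 == next // consecutive codes
&& (prev & 0xFF00) == (next & 0xFF00); // same high byte: 0x03FF -> 0x0400 breaks the range
}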
public SessionFactory build(HibernateBundle<?> bundle,
Environment environment,
PooledDataSourceFactory dbConfig,
List<Class<?>> entities) {
return build(bundle, environment, dbConfig, entities, DEFAULT_NAME);
} | @Test
void setsACustomPoolName() {
this.sessionFactory = factory.build(bundle, environment, config,
Collections.singletonList(Person.class), "custom-hibernate-db");
ArgumentCaptor<SessionFactoryManager> sessionFactoryManager = ArgumentCaptor.forClass(SessionFactoryManager.class);
verify(lifecycleEnvironment).manage(sessionFactoryManager.capture());
assertThat(sessionFactoryManager.getValue().getDataSource())
.isInstanceOfSatisfying(ManagedPooledDataSource.class, dataSource ->
assertThat(dataSource.getPool().getName()).isEqualTo("custom-hibernate-db"));
} |
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
if (chatMessage.getType() != ChatMessageType.TRADE
&& chatMessage.getType() != ChatMessageType.GAMEMESSAGE
&& chatMessage.getType() != ChatMessageType.SPAM
&& chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION)
{
return;
}
String message = chatMessage.getMessage();
Matcher matcher = KILLCOUNT_PATTERN.matcher(message);
if (matcher.find())
{
final String boss = matcher.group("boss");
final int kc = Integer.parseInt(matcher.group("kc"));
final String pre = matcher.group("pre");
final String post = matcher.group("post");
if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post))
{
unsetKc(boss);
return;
}
String renamedBoss = KILLCOUNT_RENAMES
.getOrDefault(boss, boss)
// The config service doesn't support keys with colons in them
.replace(":", "");
if (!renamedBoss.equals(boss))
{
// Unset old TOB kc
unsetKc(boss);
unsetPb(boss);
unsetKc(boss.replace(":", "."));
unsetPb(boss.replace(":", "."));
// Unset old story mode
unsetKc("Theatre of Blood Story Mode");
unsetPb("Theatre of Blood Story Mode");
}
setKc(renamedBoss, kc);
// We either already have the pb, or need to remember the boss for the upcoming pb
if (lastPb > -1)
{
log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb);
if (renamedBoss.contains("Theatre of Blood"))
{
// TOB team size isn't sent in the kill message, but can be computed from varbits
int tobTeamSize = tobTeamSize();
lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players");
}
else if (renamedBoss.contains("Tombs of Amascut"))
{
// TOA team size isn't sent in the kill message, but can be computed from varbits
int toaTeamSize = toaTeamSize();
lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players");
}
final double pb = getPb(renamedBoss);
// If a raid with a team size, only update the pb if it is lower than the existing pb
// so that the pb is the overall lowest of any team size
if (lastTeamSize == null || pb == 0 || lastPb < pb)
{
log.debug("Setting overall pb (old: {})", pb);
setPb(renamedBoss, lastPb);
}
if (lastTeamSize != null)
{
log.debug("Setting team size pb: {}", lastTeamSize);
setPb(renamedBoss + " " + lastTeamSize, lastPb);
}
lastPb = -1;
lastTeamSize = null;
}
else
{
lastBossKill = renamedBoss;
lastBossTime = client.getTickCount();
}
return;
}
matcher = DUEL_ARENA_WINS_PATTERN.matcher(message);
if (matcher.find())
{
final int oldWins = getKc("Duel Arena Wins");
final int wins = matcher.group(2).equals("one") ? 1 :
Integer.parseInt(matcher.group(2).replace(",", ""));
final String result = matcher.group(1);
int winningStreak = getKc("Duel Arena Win Streak");
int losingStreak = getKc("Duel Arena Lose Streak");
if (result.equals("won") && wins > oldWins)
{
losingStreak = 0;
winningStreak += 1;
}
else if (result.equals("were defeated"))
{
losingStreak += 1;
winningStreak = 0;
}
else
{
log.warn("unrecognized duel streak chat message: {}", message);
}
setKc("Duel Arena Wins", wins);
setKc("Duel Arena Win Streak", winningStreak);
setKc("Duel Arena Lose Streak", losingStreak);
}
matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message);
if (matcher.find())
{
int losses = matcher.group(1).equals("one") ? 1 :
Integer.parseInt(matcher.group(1).replace(",", ""));
setKc("Duel Arena Losses", losses);
}
matcher = KILL_DURATION_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = NEW_PB_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = RAIDS_PB_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = RAIDS_DURATION_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = HS_PB_PATTERN.matcher(message);
if (matcher.find())
{
int floor = Integer.parseInt(matcher.group("floor"));
String floortime = matcher.group("floortime");
String floorpb = matcher.group("floorpb");
String otime = matcher.group("otime");
String opb = matcher.group("opb");
String pb = MoreObjects.firstNonNull(floorpb, floortime);
setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb));
if (otime != null)
{
pb = MoreObjects.firstNonNull(opb, otime);
setPb("Hallowed Sepulchre", timeStringToSeconds(pb));
}
}
matcher = HS_KC_FLOOR_PATTERN.matcher(message);
if (matcher.find())
{
int floor = Integer.parseInt(matcher.group(1));
int kc = Integer.parseInt(matcher.group(2).replaceAll(",", ""));
setKc("Hallowed Sepulchre Floor " + floor, kc);
}
matcher = HS_KC_GHC_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
setKc("Hallowed Sepulchre", kc);
}
matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
setKc("Hunter Rumours", kc);
}
if (lastBossKill != null && lastBossTime != client.getTickCount())
{
lastBossKill = null;
lastBossTime = -1;
}
matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message);
if (matcher.find())
{
String item = matcher.group(1);
int petId = findPet(item);
if (petId != -1)
{
final List<Integer> petList = new ArrayList<>(getPetList());
if (!petList.contains(petId))
{
log.debug("New pet added: {}/{}", item, petId);
petList.add(petId);
setPetList(petList);
}
}
}
matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Guardians of the Rift", kc);
}
} | @Test
public void testCoXKillNoPb()
{
ChatMessage chatMessage = new ChatMessage(null, FRIENDSCHATNOTIFICATION, "", "<col=ef20ff>Congratulations - your raid is complete!</col><br>Team size: <col=ff0000>11-15 players</col> Duration:</col> <col=ff0000>23:25.40</col> Personal best: </col><col=ff0000>20:19.20</col>", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Your completed Chambers of Xeric count is: <col=ff0000>52</col>.", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
verify(configManager).setRSProfileConfiguration("killcount", "chambers of xeric", 52);
verify(configManager).setRSProfileConfiguration("personalbest", "chambers of xeric", 20 * 60 + 19.2);
verify(configManager).setRSProfileConfiguration("personalbest", "chambers of xeric 11-15 players", 20 * 60 + 19.2);
} |
public void stop() {
try {
sharedHealthState.clearMine();
} catch (HazelcastInstanceNotActiveException | RetryableHazelcastException e) {
LOG.debug("Hazelcast is not active anymore", e);
}
} | @Test
void stop_whenThrowHazelcastInactiveException_shouldSilenceError() {
logging.setLevel(DEBUG);
SharedHealthState sharedHealthStateMock = mock(SharedHealthState.class);
doThrow(HazelcastInstanceNotActiveException.class).when(sharedHealthStateMock).clearMine();
HealthStateRefresher underTest = new HealthStateRefresher(executorService, nodeHealthProvider, sharedHealthStateMock);
underTest.stop();
assertThat(logging.getLogs(ERROR)).isEmpty();
assertThat(logging.hasLog(DEBUG, "Hazelcast is not active anymore")).isTrue();
} |
public TopicConsumerConfigurationData getMatchingTopicConfiguration(String topicName) {
return topicConfigurations.stream()
.filter(topicConf -> topicConf.getTopicNameMatcher().matches(topicName))
.findFirst()
.orElseGet(() -> TopicConsumerConfigurationData.ofTopicName(topicName, this));
} | @Test(dataProvider = "topicConf")
public void testTopicConsumerConfigurationData(String topicName, int expectedPriority) {
ConsumerConfigurationData<String> consumerConfigurationData = new ConsumerConfigurationData<>();
consumerConfigurationData.setPriorityLevel(1);
consumerConfigurationData.getTopicConfigurations()
.add(TopicConsumerConfigurationData.ofTopicsPattern(Pattern.compile("^foo$"), 2));
TopicConsumerConfigurationData topicConsumerConfigurationData =
consumerConfigurationData.getMatchingTopicConfiguration(topicName);
assertThat(topicConsumerConfigurationData.getPriorityLevel()).isEqualTo(expectedPriority);
} |
public static String getFileName(String source) {
return source.contains(File.separator) ?
source.substring(source.lastIndexOf(File.separatorChar) + 1) : source;
} | @Test
void getFileName() {
String fileName = "file_name.txt";
String source = fileName;
assertThat(FileNameUtils.getFileName(source)).isEqualTo(fileName);
source = File.separator + "dir" + File.separator + fileName;
assertThat(FileNameUtils.getFileName(source)).isEqualTo(fileName);
} |
public boolean createMetadataTable() {
GCRules.GCRule gcRules = GCRules.GCRULES.maxVersions(1);
if (tableAdminClient.exists(tableId)) {
Table table = tableAdminClient.getTable(tableId);
List<ColumnFamily> currentCFs = table.getColumnFamilies();
ModifyColumnFamiliesRequest request = ModifyColumnFamiliesRequest.of(tableId);
boolean needsNewColumnFamily = false;
for (String targetCF : COLUMN_FAMILIES) {
boolean exists = false;
for (ColumnFamily currentCF : currentCFs) {
if (targetCF.equals(currentCF.getId())) {
exists = true;
break;
}
}
if (!exists) {
needsNewColumnFamily = true;
request.addFamily(targetCF, gcRules);
}
}
if (needsNewColumnFamily) {
tableAdminClient.modifyFamilies(request);
}
return false;
}
CreateTableRequest createTableRequest = CreateTableRequest.of(tableId);
for (String cf : COLUMN_FAMILIES) {
createTableRequest.addFamily(cf, gcRules);
}
tableAdminClient.createTable(createTableRequest);
return true;
} | @Test
public void testCreateTableDoesNotExist() {
assertTrue(metadataTableAdminDao.createMetadataTable());
com.google.bigtable.admin.v2.ColumnFamily gcRule =
com.google.bigtable.admin.v2.ColumnFamily.newBuilder()
.setGcRule(GcRule.newBuilder().setMaxNumVersions(1).build())
.build();
List<ColumnFamily> expectedColumnFamilies = new ArrayList<>();
for (String columnFamilyName : COLUMN_FAMILIES) {
expectedColumnFamilies.add(ColumnFamily.fromProto(columnFamilyName, gcRule));
}
Table table = tableAdminClient.getTable(tableId);
assertThat(
table.getColumnFamilies(), Matchers.containsInAnyOrder(expectedColumnFamilies.toArray()));
} |
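// A minimal wiring sketch for the admin client the DAO wraps (the project,
// instance and table ids here are assumptions, not the test harness above):
BigtableTableAdminClient adminClient =
BigtableTableAdminClient.create("my-project", "my-instance");
boolean alreadyThere = adminClient.exists("metadata-table"); // same check the DAO starts with
adminClient.close();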
public String anonymize(final ParseTree tree) {
return build(tree);
} | @Test
public void shouldAnonymizeCreateSourceStreamQueryCorrectly() {
final String output = anon.anonymize(
"CREATE SOURCE STREAM my_stream (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE)\n"
+ "WITH (kafka_topic='locations', value_format='json');");
Approvals.verify(output);
} |
SqlResult execute(CreateMappingPlan plan, SqlSecurityContext ssc) {
catalog.createMapping(plan.mapping(), plan.replace(), plan.ifNotExists(), ssc);
return UpdateSqlResultImpl.createUpdateCountResult(0);
} | @Test
public void test_insertExecution() {
// given
QueryId queryId = QueryId.create(UuidUtil.newSecureUUID());
DmlPlan plan = new DmlPlan(
Operation.INSERT,
planKey(),
QueryParameterMetadata.EMPTY,
emptySet(),
dag,
null,
false,
planExecutor,
Collections.emptyList(),
false,
null
);
given(hazelcastInstance.getJet()).willReturn(jetInstance);
given(jetInstance.newLightJob(eq(dag), isA(JobConfig.class), eq(null)))
.willReturn(job);
// when
SqlResult result = planExecutor.execute(plan, queryId, emptyList(), 0L, INSTANCE);
// then
assertThat(result.updateCount()).isEqualTo(0);
verify(job).join();
} |
public static UForLoop create(
Iterable<? extends UStatement> initializer,
@Nullable UExpression condition,
Iterable<? extends UExpressionStatement> update,
UStatement statement) {
return new AutoValue_UForLoop(
ImmutableList.copyOf(initializer),
condition,
ImmutableList.copyOf(update),
(USimpleStatement) statement);
} | @Test
public void equality() {
new EqualsTester()
.addEqualityGroup(
UForLoop.create(
ImmutableList.of(
UVariableDecl.create("i", UPrimitiveTypeTree.INT, UFreeIdent.create("from"))),
UBinary.create(Kind.LESS_THAN, ULocalVarIdent.create("i"), UFreeIdent.create("to")),
ImmutableList.of(
UExpressionStatement.create(
UUnary.create(Kind.POSTFIX_INCREMENT, ULocalVarIdent.create("i")))),
UBlock.create()))
.addEqualityGroup(
UForLoop.create(
ImmutableList.of(
UVariableDecl.create("i", UPrimitiveTypeTree.INT, UFreeIdent.create("from"))),
UBinary.create(Kind.LESS_THAN, ULocalVarIdent.create("i"), UFreeIdent.create("to")),
ImmutableList.<UExpressionStatement>of(),
UBlock.create(
UExpressionStatement.create(
UUnary.create(Kind.POSTFIX_INCREMENT, ULocalVarIdent.create("i"))))))
.testEquals();
} |
public ModelMBeanInfo getMBeanInfo(Object defaultManagedBean, Object customManagedBean, String objectName) throws JMException {
if ((defaultManagedBean == null && customManagedBean == null) || objectName == null) {
return null;
}
// skip proxy classes
if (defaultManagedBean != null && Proxy.isProxyClass(defaultManagedBean.getClass())) {
LOGGER.trace("Skip creating ModelMBeanInfo due proxy class {}", defaultManagedBean.getClass());
return null;
}
// maps and lists to contain information about attributes and operations
Map<String, ManagedAttributeInfo> attributes = new LinkedHashMap<>();
Set<ManagedOperationInfo> operations = new LinkedHashSet<>();
Set<ModelMBeanAttributeInfo> mBeanAttributes = new LinkedHashSet<>();
Set<ModelMBeanOperationInfo> mBeanOperations = new LinkedHashSet<>();
Set<ModelMBeanNotificationInfo> mBeanNotifications = new LinkedHashSet<>();
// extract details from default managed bean
if (defaultManagedBean != null) {
extractAttributesAndOperations(defaultManagedBean.getClass(), attributes, operations);
extractMbeanAttributes(defaultManagedBean, attributes, mBeanAttributes, mBeanOperations);
extractMbeanOperations(defaultManagedBean, operations, mBeanOperations);
extractMbeanNotifications(defaultManagedBean, mBeanNotifications);
}
// extract details from custom managed bean
if (customManagedBean != null) {
extractAttributesAndOperations(customManagedBean.getClass(), attributes, operations);
extractMbeanAttributes(customManagedBean, attributes, mBeanAttributes, mBeanOperations);
extractMbeanOperations(customManagedBean, operations, mBeanOperations);
extractMbeanNotifications(customManagedBean, mBeanNotifications);
}
// create the ModelMBeanInfo
String name = getName(customManagedBean != null ? customManagedBean : defaultManagedBean, objectName);
String description = getDescription(customManagedBean != null ? customManagedBean : defaultManagedBean, objectName);
ModelMBeanAttributeInfo[] arrayAttributes = mBeanAttributes.toArray(new ModelMBeanAttributeInfo[mBeanAttributes.size()]);
ModelMBeanOperationInfo[] arrayOperations = mBeanOperations.toArray(new ModelMBeanOperationInfo[mBeanOperations.size()]);
ModelMBeanNotificationInfo[] arrayNotifications = mBeanNotifications.toArray(new ModelMBeanNotificationInfo[mBeanNotifications.size()]);
ModelMBeanInfo info = new ModelMBeanInfoSupport(name, description, arrayAttributes, null, arrayOperations, arrayNotifications);
LOGGER.trace("Created ModelMBeanInfo {}", info);
return info;
} | @Test
public void testInherited() throws JMException {
ModelMBeanInfo beanInfo = mbeanInfoAssembler.getMBeanInfo(new BadInherited(), null, "someName");
assertThat(beanInfo).isNotNull();
assertThat(beanInfo.getAttributes()).hasSize(2);
assertThat(beanInfo.getOperations()).hasSize(3);
} |
@CanIgnoreReturnValue
public final Ordered containsAtLeastEntriesIn(Multimap<?, ?> expectedMultimap) {
checkNotNull(expectedMultimap, "expectedMultimap");
checkNotNull(actual);
ListMultimap<?, ?> missing = difference(expectedMultimap, actual);
// TODO(kak): Possible enhancement: Include "[1 copy]" if the element does appear in
// the subject but not enough times. Similarly for unexpected extra items.
if (!missing.isEmpty()) {
failWithActual(
fact("missing", countDuplicatesMultimap(annotateEmptyStringsMultimap(missing))),
simpleFact("---"),
fact("expected to contain at least", annotateEmptyStringsMultimap(expectedMultimap)));
return ALREADY_FAILED;
}
return new MultimapInOrder(/* allowUnexpected = */ true, expectedMultimap);
} | @Test
public void containsAtLeastInOrderFailure() {
ImmutableMultimap<Integer, String> actual =
ImmutableMultimap.of(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
ImmutableMultimap<Integer, String> expected =
ImmutableMultimap.of(4, "four", 3, "six", 3, "two", 3, "one");
assertThat(actual).containsAtLeastEntriesIn(expected);
expectFailureWhenTestingThat(actual).containsAtLeastEntriesIn(expected).inOrder();
assertFailureKeys(
"contents match, but order was wrong",
"keys are not in order",
"keys with out-of-order values",
"---",
"expected to contain at least",
"but was");
assertFailureValue("keys with out-of-order values", "[3]");
assertFailureValue("expected to contain at least", "{4=[four], 3=[six, two, one]}");
assertFailureValue("but was", "{3=[one, six, two], 4=[five, four]}");
} |
@Override
public GlobalBeginResponseProto convert2Proto(GlobalBeginResponse globalBeginResponse) {
final short typeCode = globalBeginResponse.getTypeCode();
final AbstractMessageProto abstractMessage = AbstractMessageProto.newBuilder().setMessageType(
MessageTypeProto.forNumber(typeCode)).build();
final String msg = globalBeginResponse.getMsg();
final AbstractResultMessageProto abstractResultMessageProto = AbstractResultMessageProto.newBuilder().setMsg(
msg == null ? "" : msg).setResultCode(ResultCodeProto.valueOf(globalBeginResponse.getResultCode().name()))
.setAbstractMessage(abstractMessage).build();
final AbstractTransactionResponseProto abstractTransactionRequestProto = AbstractTransactionResponseProto
.newBuilder().setAbstractResultMessage(abstractResultMessageProto).setTransactionExceptionCode(
TransactionExceptionCodeProto.valueOf(globalBeginResponse.getTransactionExceptionCode().name()))
.build();
final String extraData = globalBeginResponse.getExtraData();
GlobalBeginResponseProto result = GlobalBeginResponseProto.newBuilder().setAbstractTransactionResponse(
abstractTransactionRequestProto).setExtraData(extraData == null ? "" : extraData).setXid(
globalBeginResponse.getXid()).build();
return result;
} | @Test
public void convert2Proto() {
GlobalBeginResponse globalBeginResponse = new GlobalBeginResponse();
globalBeginResponse.setResultCode(ResultCode.Failed);
globalBeginResponse.setMsg("msg");
globalBeginResponse.setExtraData("extraData");
globalBeginResponse.setXid("xid");
globalBeginResponse.setTransactionExceptionCode(TransactionExceptionCode.BranchRollbackFailed_Retriable);
GlobalBeginResponseConvertor convertor = new GlobalBeginResponseConvertor();
GlobalBeginResponseProto proto = convertor.convert2Proto(globalBeginResponse);
GlobalBeginResponse real = convertor.convert2Model(proto);
assertThat((real.getTypeCode())).isEqualTo(globalBeginResponse.getTypeCode());
assertThat((real.getMsg())).isEqualTo(globalBeginResponse.getMsg());
assertThat((real.getResultCode())).isEqualTo(globalBeginResponse.getResultCode());
assertThat((real.getTransactionExceptionCode())).isEqualTo(globalBeginResponse.getTransactionExceptionCode());
} |
@Override
public ValidationResult validate() {
final ValidationResult validationResult = new ValidationResult();
if (searchWithinMs() <= 0) {
validationResult.addError(FIELD_SEARCH_WITHIN_MS,
"Filter & Aggregation search_within_ms must be greater than 0.");
}
if (executeEveryMs() <= 0) {
validationResult.addError(FIELD_EXECUTE_EVERY_MS,
"Filter & Aggregation execute_every_ms must be greater than 0.");
}
if (!groupBy().isEmpty() && (series().isEmpty() || isConditionsEmpty())) {
validationResult.addError(FIELD_SERIES, "Aggregation with group_by must also contain series");
validationResult.addError(FIELD_CONDITIONS, "Aggregation with group_by must also contain conditions");
}
if (series().isEmpty() && !isConditionsEmpty()) {
validationResult.addError(FIELD_SERIES, "Aggregation with conditions must also contain series");
}
if (!series().isEmpty() && isConditionsEmpty()) {
validationResult.addError(FIELD_CONDITIONS, "Aggregation with series must also contain conditions");
}
series().stream()
.filter(ser -> ser instanceof HasField)
.forEach(ser -> {
final String field = ((HasField) ser).field();
if (field == null || field.isEmpty()) {
validationResult.addError(FIELD_SERIES, "Aggregation's series of type " + ser.type() + " must contain non-empty value for field");
}
});
if (useCronScheduling()) {
if (cronExpression() == null || cronExpression().isEmpty()) {
validationResult.addError(FIELD_CRON_EXPRESSION, "Cron expression must not be empty when using cron scheduling");
} else {
try {
CronUtils.validateExpression(cronExpression());
} catch (Exception e) {
validationResult.addError(FIELD_CRON_EXPRESSION, e.getMessage());
}
}
}
return validationResult;
} | @Test
public void testValidConfiguration() {
final ValidationResult validationResult = getConfig().validate();
assertThat(validationResult.failed()).isFalse();
assertThat(validationResult.getErrors().size()).isEqualTo(0);
} |
public static <K, V> WithKeys<K, V> of(SerializableFunction<V, K> fn) {
checkNotNull(
fn, "WithKeys constructed with null function. Did you mean WithKeys.of((Void) null)?");
return new WithKeys<>(fn, null);
} | @Test
@Category(NeedsRunner.class)
public void testConstantKeys() {
PCollection<String> input =
p.apply(Create.of(Arrays.asList(COLLECTION)).withCoder(StringUtf8Coder.of()));
PCollection<KV<Integer, String>> output = input.apply(WithKeys.of(100));
PAssert.that(output).containsInAnyOrder(WITH_CONST_KEYS);
p.run();
} |
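// Usage sketch: keying by a lambda requires an explicit key TypeDescriptor, since
// the key type cannot be inferred from the lambda alone ("words" is an assumed
// PCollection<String>):
PCollection<KV<Integer, String>> byLength =
words.apply(WithKeys.of((String s) -> s.length())
.withKeyType(TypeDescriptors.integers()));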
public static AggregationUnit create(final AggregationType type, final boolean isDistinct) {
switch (type) {
case MAX:
return new ComparableAggregationUnit(false);
case MIN:
return new ComparableAggregationUnit(true);
case SUM:
return isDistinct ? new DistinctSumAggregationUnit() : new AccumulationAggregationUnit();
case COUNT:
return isDistinct ? new DistinctCountAggregationUnit() : new AccumulationAggregationUnit();
case AVG:
return isDistinct ? new DistinctAverageAggregationUnit() : new AverageAggregationUnit();
case BIT_XOR:
return new BitXorAggregationUnit();
default:
throw new UnsupportedSQLOperationException(type.name());
}
} | @Test
void assertCreateAverageAggregationUnit() {
assertThat(AggregationUnitFactory.create(AggregationType.AVG, false), instanceOf(AverageAggregationUnit.class));
} |
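// The distinct flag selects the unit implementation; the caller's code path does
// not change (a usage sketch based only on the factory shown above):
AggregationUnit sum = AggregationUnitFactory.create(AggregationType.SUM, false); // AccumulationAggregationUnit
AggregationUnit distinctSum = AggregationUnitFactory.create(AggregationType.SUM, true); // DistinctSumAggregationUnit
AggregationUnit avg = AggregationUnitFactory.create(AggregationType.AVG, true); // DistinctAverageAggregationUnit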
public Paragraph addNewParagraph(AuthenticationInfo authenticationInfo) {
return insertNewParagraph(paragraphs.size(), authenticationInfo);
} | @Test
void addParagraphWithLastReplNameTest() throws InterpreterNotFoundException {
when(interpreterFactory.getInterpreter(eq("spark"), any())).thenReturn(interpreter);
Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener, zConf, noteParser);
Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
p1.setText("%spark ");
Paragraph p2 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
assertEquals("%spark\n", p2.getText());
} |
public NodeModel commonAncestor() {
return commonAncestor;
} | @Test
public void oneLevelAncestor(){
final NodeModel parent = root();
final NodeModel node1 = new NodeModel("node1", map);
parent.insert(node1);
final NodeModel node2 = new NodeModel("node2", map);
parent.insert(node2);
NodeModel commonAncestor = new NodeRelativePath(node1, node2).commonAncestor();
assertThat(commonAncestor, equalTo(parent));
} |
public ShuffleDescriptor getShuffleDescriptor() {
return shuffleDescriptor;
} | @Test
void testSerializationWithNettyShuffleDescriptor() throws IOException {
ShuffleDescriptor shuffleDescriptor =
new NettyShuffleDescriptor(
producerLocation,
new NetworkPartitionConnectionInfo(address, connectionIndex),
resultPartitionID);
ResultPartitionDeploymentDescriptor copy =
createCopyAndVerifyResultPartitionDeploymentDescriptor(shuffleDescriptor);
assertThat(copy.getShuffleDescriptor()).isInstanceOf(NettyShuffleDescriptor.class);
NettyShuffleDescriptor shuffleDescriptorCopy =
(NettyShuffleDescriptor) copy.getShuffleDescriptor();
assertThat(resultPartitionID).isEqualTo(shuffleDescriptorCopy.getResultPartitionID());
assertThat(shuffleDescriptorCopy.isUnknown()).isFalse();
assertThat(shuffleDescriptorCopy.isLocalTo(producerLocation)).isTrue();
assertThat(connectionID).isEqualTo(shuffleDescriptorCopy.getConnectionId());
} |
@Override
public void close() throws IOException {
zkServiceManager.chooseService().close();
} | @Test
public void close() throws IOException {
zkDiscoveryClient.close();
Mockito.verify(zkService34, Mockito.times(1)).close();
} |
@NonNull
public Client authenticate(@NonNull Request request) {
// https://datatracker.ietf.org/doc/html/rfc7521#section-4.2
try {
if (!CLIENT_ASSERTION_TYPE_PRIVATE_KEY_JWT.equals(request.clientAssertionType())) {
throw new AuthenticationException(
"unsupported client_assertion_type='%s', expected '%s'"
.formatted(request.clientAssertionType(), CLIENT_ASSERTION_TYPE_PRIVATE_KEY_JWT));
}
var processor = new DefaultJWTProcessor<>();
var keySelector =
new JWSVerificationKeySelector<>(
Set.of(JWSAlgorithm.RS256, JWSAlgorithm.ES256), jwkSource);
processor.setJWSKeySelector(keySelector);
processor.setJWTClaimsSetVerifier(
new DefaultJWTClaimsVerifier<>(
new JWTClaimsSet.Builder().audience(baseUri.toString()).build(),
Set.of(
JWTClaimNames.JWT_ID,
JWTClaimNames.EXPIRATION_TIME,
JWTClaimNames.ISSUER,
JWTClaimNames.SUBJECT)));
var claims = processor.process(request.clientAssertion(), null);
var clientId = clientIdFromAssertion(request.clientId(), claims);
return new Client(clientId);
} catch (ParseException e) {
throw new AuthenticationException("failed to parse client assertion", e);
} catch (BadJOSEException | JOSEException e) {
throw new AuthenticationException("failed to verify client assertion", e);
}
} | @Test
void authenticate_missingJwtId() throws JOSEException {
var key = generateKey();
var jwkSource = new StaticJwkSource<>(key);
var claims =
new JWTClaimsSet.Builder()
.audience(RP_ISSUER.toString())
.subject("not the right client")
.issuer(CLIENT_ID)
.expirationTime(Date.from(Instant.now().plusSeconds(60)))
.build();
var signed = signJwt(claims, key);
var authenticator = new ClientAuthenticator(jwkSource, RP_ISSUER);
// when & then
assertThrows(
AuthenticationException.class,
() ->
authenticator.authenticate(
new Request(
CLIENT_ID, ClientAuthenticator.CLIENT_ASSERTION_TYPE_PRIVATE_KEY_JWT, signed)));
} |
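// For contrast, a sketch of an assertion the verifier would accept: identical to
// the failing test's claims except that the jti is present and the subject is the
// client itself (signJwt and key are the same assumed test helpers as above):
var claims = new JWTClaimsSet.Builder()
.jwtID(UUID.randomUUID().toString()) // the claim the failing test leaves out
.issuer(CLIENT_ID)
.subject(CLIENT_ID)
.audience(RP_ISSUER.toString())
.expirationTime(Date.from(Instant.now().plusSeconds(60)))
.build();
var signed = signJwt(claims, key);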
@Override
public int getColumnLength(final Object value) {
throw new UnsupportedSQLOperationException("PostgreSQLInt4ArrayBinaryProtocolValue.getColumnLength()");
} | @Test
void assertGetColumnLength() {
assertThrows(UnsupportedSQLOperationException.class, () -> newInstance().getColumnLength("val"));
} |
protected void setJobProperties(Map<String, String> properties) throws DdlException {
// resource info
if (ConnectContext.get() != null) {
loadMemLimit = ConnectContext.get().getSessionVariable().getLoadMemLimit();
user = ConnectContext.get().getQualifiedUser();
}
// job properties
if (properties != null) {
if (properties.containsKey(LoadStmt.TIMEOUT_PROPERTY)) {
try {
timeoutSecond = Integer.parseInt(properties.get(LoadStmt.TIMEOUT_PROPERTY));
} catch (NumberFormatException e) {
throw new DdlException("Timeout is not INT", e);
}
}
if (properties.containsKey(LoadStmt.MAX_FILTER_RATIO_PROPERTY)) {
try {
maxFilterRatio = Double.parseDouble(properties.get(LoadStmt.MAX_FILTER_RATIO_PROPERTY));
} catch (NumberFormatException e) {
throw new DdlException("Max filter ratio is not DOUBLE", e);
}
}
if (properties.containsKey(LoadStmt.LOAD_DELETE_FLAG_PROPERTY)) {
throw new DdlException("delete flag is not supported");
}
if (properties.containsKey(LoadStmt.PARTIAL_UPDATE)) {
partialUpdate = Boolean.valueOf(properties.get(LoadStmt.PARTIAL_UPDATE));
}
if (properties.containsKey(LoadStmt.PARTIAL_UPDATE_MODE)) {
partialUpdateMode = properties.get(LoadStmt.PARTIAL_UPDATE_MODE);
}
if (properties.containsKey(LoadStmt.MERGE_CONDITION)) {
mergeCondition = properties.get(LoadStmt.MERGE_CONDITION);
}
if (properties.containsKey(LoadStmt.LOAD_MEM_LIMIT)) {
try {
loadMemLimit = Long.parseLong(properties.get(LoadStmt.LOAD_MEM_LIMIT));
} catch (NumberFormatException e) {
throw new DdlException("Execute memory limit is not Long", e);
}
}
if (properties.containsKey(LoadStmt.STRICT_MODE)) {
strictMode = Boolean.valueOf(properties.get(LoadStmt.STRICT_MODE));
}
if (properties.containsKey(LoadStmt.TIMEZONE)) {
timezone = properties.get(LoadStmt.TIMEZONE);
} else if (ConnectContext.get() != null) {
            // otherwise take the timezone from the session variable
timezone = ConnectContext.get().getSessionVariable().getTimeZone();
}
if (properties.containsKey(LoadStmt.PRIORITY)) {
priority = LoadPriority.priorityByName(properties.get(LoadStmt.PRIORITY));
}
if (properties.containsKey(LoadStmt.LOG_REJECTED_RECORD_NUM)) {
logRejectedRecordNum = Long.parseLong(properties.get(LoadStmt.LOG_REJECTED_RECORD_NUM));
}
if (properties.containsKey(PropertyAnalyzer.PROPERTIES_WAREHOUSE)) {
String warehouseName = properties.get(PropertyAnalyzer.PROPERTIES_WAREHOUSE);
Warehouse warehouse = GlobalStateMgr.getCurrentState().getWarehouseMgr().getWarehouse(warehouseName);
if (warehouse == null) {
                    throw new DdlException("Warehouse " + warehouseName + " does not exist.");
}
warehouseId = warehouse.getId();
}
if (properties.containsKey(LoadStmt.STRIP_OUTER_ARRAY)) {
jsonOptions.stripOuterArray = Boolean.parseBoolean(properties.get(LoadStmt.STRIP_OUTER_ARRAY));
}
if (properties.containsKey(LoadStmt.JSONPATHS)) {
jsonOptions.jsonPaths = properties.get(LoadStmt.JSONPATHS);
}
if (properties.containsKey(LoadStmt.JSONROOT)) {
jsonOptions.jsonRoot = properties.get(LoadStmt.JSONROOT);
}
}
} | @Test
public void testSetJobPropertiesWithErrorTimeout() {
Map<String, String> jobProperties = Maps.newHashMap();
jobProperties.put(LoadStmt.TIMEOUT_PROPERTY, "abc");
LoadJob loadJob = new BrokerLoadJob();
try {
loadJob.setJobProperties(jobProperties);
Assert.fail();
} catch (DdlException e) {
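            // expected: "abc" cannot be parsed as an integer timeout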
}
} |
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) {
return Optional.ofNullable(HANDLERS.get(step.getClass()))
.map(h -> h.handle(this, schema, step))
.orElseThrow(() -> new IllegalStateException("Unhandled step class: " + step.getClass()));
} | @Test
public void shouldResolveSchemaForTableSelect() {
// Given:
final TableSelect<?> step = new TableSelect<>(
PROPERTIES,
tableSource,
ImmutableList.of(),
ImmutableList.of(
add("JUICE", "ORANGE", "APPLE"),
ref("PLANTAIN", "BANANA"),
ref("CITRUS", "ORANGE")),
internalFormats
);
// When:
final LogicalSchema result = resolver.resolve(step, SCHEMA);
// Then:
assertThat(result, is(
LogicalSchema.builder()
.keyColumn(ColumnName.of("K0"), SqlTypes.INTEGER)
.valueColumn(ColumnName.of("JUICE"), SqlTypes.BIGINT)
.valueColumn(ColumnName.of("PLANTAIN"), SqlTypes.STRING)
.valueColumn(ColumnName.of("CITRUS"), SqlTypes.INTEGER)
.build())
);
} |
@Override
public void validTenant(Long id) {
TenantDO tenant = getTenant(id);
if (tenant == null) {
throw exception(TENANT_NOT_EXISTS);
}
if (tenant.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
throw exception(TENANT_DISABLE, tenant.getName());
}
if (DateUtils.isExpired(tenant.getExpireTime())) {
throw exception(TENANT_EXPIRE, tenant.getName());
}
} | @Test
public void testValidTenant_expired() {
    // mock data
TenantDO tenant = randomPojo(TenantDO.class, o -> o.setId(1L).setStatus(CommonStatusEnum.ENABLE.getStatus())
.setExpireTime(buildTime(2020, 2, 2)));
tenantMapper.insert(tenant);
    // invoke and assert the expected business exception
assertServiceException(() -> tenantService.validTenant(1L), TENANT_EXPIRE, tenant.getName());
} |
public V put(final int key, final V value) {
final Entry<V>[] table = this.table;
final int index = HashUtil.indexFor(key, table.length, mask);
for (Entry<V> e = table[index]; e != null; e = e.hashNext) {
if (e.key == key) {
moveToTop(e);
return e.setValue(value);
}
}
final Entry<V> e = new Entry<>(key, value);
e.hashNext = table[index];
table[index] = e;
final Entry<V> top = this.top;
e.next = top;
if (top != null) {
top.previous = e;
} else {
back = e;
}
this.top = e;
_size += 1;
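        // evict the eldest entry if the subclass asks for it; otherwise grow the table once capacity is exceeded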
if (removeEldestEntry(back)) {
remove(back.key);
} else if (_size > capacity) {
rehash(HashUtil.nextCapacity(capacity));
}
return null;
} | @Test
public void keySet() {
final IntLinkedHashMap<String> tested = new IntLinkedHashMap<>();
for (int i = 0; i < 10000; ++i) {
tested.put(i, Integer.toString(i));
}
int i = 10000;
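    // new entries are linked at the top, so iteration runs in reverse insertion order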
for (Integer key : tested.keySet()) {
Assert.assertEquals(--i, key.intValue());
}
} |
@Override
public PartialConfig load(File configRepoCheckoutDirectory, PartialConfigLoadContext context) {
File[] allFiles = getFiles(configRepoCheckoutDirectory, context);
        // if the context provided a changed-files list, we could parse only the new content
PartialConfig[] allFragments = parseFiles(allFiles);
PartialConfig partialConfig = new PartialConfig();
collectFragments(allFragments, partialConfig);
return partialConfig;
} | @Test
public void shouldLoadDirectoryWithTwoPipelineGroupsAndEnvironment() throws Exception {
GoConfigMother mother = new GoConfigMother();
PipelineGroups groups = mother.cruiseConfigWithTwoPipelineGroups().getGroups();
EnvironmentConfig env = EnvironmentConfigMother.environment("dev");
helper.addFileWithPipelineGroup("group1.gocd.xml", groups.get(0));
helper.addFileWithPipelineGroup("group2.gocd.xml", groups.get(1));
helper.addFileWithEnvironment("dev-env.gocd.xml", env);
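    // loading the checkout directory should merge both pipeline groups and the environment into one PartialConfig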
PartialConfig part = xmlPartialProvider.load(tmpFolder, mock(PartialConfigLoadContext.class));
PipelineGroups groupsRead = part.getGroups();
    assertThat(groupsRead.size(), is(2));
    EnvironmentsConfig loadedEnvs = part.getEnvironments();
    assertThat(loadedEnvs.size(), is(1));
    assertThat(loadedEnvs.get(0), is(env));
} |
public ClusterStatus appendNewState(ClusterState state) {
return new ClusterStatus(
state,
createUpdatedHistoryWithNewState(state),
previousAttemptSummary,
currentAttemptSummary);
} | @Test
void testAppendNewState() {
ClusterStatus status = new ClusterStatus();
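    // a freshly constructed ClusterStatus already holds one initial state in its history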
ClusterState newState = new ClusterState(ClusterStateSummary.RunningHealthy, "foo");
ClusterStatus newStatus = status.appendNewState(newState);
assertEquals(2, newStatus.getStateTransitionHistory().size());
assertEquals(newState, newStatus.getStateTransitionHistory().get(1L));
} |
public Map<String, String> getParameters() {
return urlParam.getParameters();
} | @Test
void testGetParameters() {
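    // filter the URL's parameters down to just the "version" entry via the predicate overload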
URL url = URL.valueOf(
"10.20.130.230:20880/context/path?interface=org.apache.dubbo.test.interfaceName&group=group&version=1.0.0");
Map<String, String> parameters = url.getParameters(i -> "version".equals(i));
String version = parameters.get("version");
assertEquals(1, parameters.size());
assertEquals("1.0.0", version);
} |
public static NotFoundException itemNotFound(long itemId) {
    return new NotFoundException("item not found for itemId:%s", itemId);
} | @Test
public void testItemNotFoundException(){
NotFoundException exception = NotFoundException.itemNotFound(66);
    assertEquals("item not found for itemId:66", exception.getMessage());
    exception = NotFoundException.itemNotFound("test.key");
    assertEquals("item not found for itemKey:test.key", exception.getMessage());
    exception = NotFoundException.itemNotFound(appId, clusterName, namespaceName, "test.key");
    assertEquals("item not found for appId:app-1001 clusterName:test namespaceName:application itemKey:test.key", exception.getMessage());
    exception = NotFoundException.itemNotFound(appId, clusterName, namespaceName, 66);
    assertEquals("item not found for appId:app-1001 clusterName:test namespaceName:application itemId:66", exception.getMessage());
} |
@Override
public ResultSet getExportedKeys(final String catalog, final String schema, final String table) throws SQLException {
return createDatabaseMetaDataResultSet(getDatabaseMetaData().getExportedKeys(getActualCatalog(catalog), getActualSchema(schema), getActualTable(getActualCatalog(catalog), table)));
} | @Test
void assertGetExportedKeys() throws SQLException {
when(databaseMetaData.getExportedKeys("test", null, null)).thenReturn(resultSet);
assertThat(shardingSphereDatabaseMetaData.getExportedKeys("test", null, null), instanceOf(DatabaseMetaDataResultSet.class));
} |